From 674b97dd392f5991100cff11a8fdaa195f19b10c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?=
Date: Mon, 30 Mar 2026 04:43:43 +0800
Subject: [PATCH 1/3] Fix md compiler native test binding and JSX preservation
---
.../src/compiler/mdx-to-md.test.ts | 71 ++-
.../md-compiler/src/compiler/transformer.ts | 329 ++++++++++++-
libraries/md-compiler/src/lib.rs | 2 -
libraries/md-compiler/src/markdown/index.ts | 35 +-
libraries/md-compiler/src/mdx-to-md.test.ts | 35 ++
libraries/md-compiler/src/mdx-to-md.ts | 49 +-
libraries/md-compiler/src/mdx_to_md.rs | 96 +++-
libraries/md-compiler/src/native-binding.ts | 10 +
libraries/md-compiler/src/serializer.rs | 120 ++++-
libraries/md-compiler/src/transformer.rs | 450 +++++++++++++++++-
10 files changed, 1157 insertions(+), 40 deletions(-)
create mode 100644 libraries/md-compiler/src/native-binding.ts
diff --git a/libraries/md-compiler/src/compiler/mdx-to-md.test.ts b/libraries/md-compiler/src/compiler/mdx-to-md.test.ts
index 0a363f86..27417d45 100644
--- a/libraries/md-compiler/src/compiler/mdx-to-md.test.ts
+++ b/libraries/md-compiler/src/compiler/mdx-to-md.test.ts
@@ -232,7 +232,69 @@ Content inside
it('should handle HTML-like elements', async () => {
const input = 'Some content
'
const result = await mdxToMd(input)
- expect(typeof result).toBe('string') // Should either convert or skip gracefully
+ expect(result).toContain('Some content
')
+ })
+ })
+
+ describe('intrinsic HTML preservation regressions', () => {
+ it('preserves an unhandled intrinsic block with nested image markup', async () => {
+ const input = `
+
+
`
+
+ const result = await mdxToMd(input)
+
+ expect(result).toContain('')
+ expect(result).toContain('
')
+ expect(result).toContain('
')
+ })
+
+ it('preserves an unhandled intrinsic block with inline formatting and links', async () => {
+ const input = `
+ English | 简体中文
+
`
+
+ const result = await mdxToMd(input)
+
+ expect(result).toContain('')
+ expect(result).toContain('English | 简体中文')
+ expect(result).toContain('
')
+ })
+
+ it('preserves the opening sample section without dropping intrinsic blocks', async () => {
+ const input = `
+
+
+
+# China Unemployment Watch
+
+
+ English | 简体中文
+
`
+
+ const result = await mdxToMd(input)
+
+ expect(result).toContain('')
+ expect(result).toContain('# China Unemployment Watch')
+ expect(result).toContain('
')
+ })
+
+ it('evaluates expressions and attribute expressions inside preserved intrinsic blocks', async () => {
+ const input = '
{count}
'
+
+ const result = await mdxToMd(input, {
+ scope: {
+ side: 'right',
+ count: 2,
+ logo: './logo.svg',
+ width: 138
+ }
+ })
+
+ expect(result).toBe('2
')
})
})
@@ -283,6 +345,13 @@ Content inside
expect(result).toContain('[file.js]')
expect(result).toContain('(https://example.com/path/to/file.js)')
})
+
+ it('should not collapse non-URL self-labeled links into autolinks', async () => {
+ const input = '[README](README) and [#section](#section)'
+ const result = await mdxToMd(input)
+
+ expect(result).toBe('[README](README) and [#section](#section)')
+ })
})
describe('yAML frontmatter preservation', () => {
diff --git a/libraries/md-compiler/src/compiler/transformer.ts b/libraries/md-compiler/src/compiler/transformer.ts
index 260f00d6..748c3b03 100644
--- a/libraries/md-compiler/src/compiler/transformer.ts
+++ b/libraries/md-compiler/src/compiler/transformer.ts
@@ -1,16 +1,58 @@
import type {Program} from 'estree' // AST transformation module for lossless MDX to Markdown conversion // transformer.ts
import type {Paragraph, Parent, Root, RootContent, Text} from 'mdast'
-import type {MdxJsxFlowElement} from 'mdast-util-mdx'
+import type {MdxJsxFlowElement, MdxJsxTextElement} from 'mdast-util-mdx'
import type {EvaluateExpressionOptions} from './expression-eval'
import type {ExpressionDiagnosticContext, ProcessingContext} from './types'
+import remarkGfm from 'remark-gfm'
+import remarkStringify from 'remark-stringify'
+import {unified} from 'unified'
import {isMdxComponent, processComponent} from './component-processor'
import {evaluateExpression} from './expression-eval'
import {convertJsxToMarkdown} from './jsx-converter'
import {evaluateJsxExpression, hasJsxInEstree} from './jsx-expression-eval'
type ChildNode = RootContent | Text
+type SourceRenderableNode = ChildNode
+interface PositionedNode {
+ position?: ExpressionDiagnosticContext['position']
+}
+
+interface SourceReplacement {
+ start: number
+ end: number
+ value: string
+}
const FILE_PATH_SUFFIX_PATTERN = /\.\w+$/u
+const INTRINSIC_JSX_NAME_PATTERN = /^[a-z]/u
+const SPREAD_ATTRIBUTE_PREFIX_PATTERN = /^\.\.\./u
+const TRAILING_NEWLINES_PATTERN = /\n+$/u
+const INLINE_RENDERABLE_TYPES = new Set([
+ 'break',
+ 'delete',
+ 'emphasis',
+ 'html',
+ 'image',
+ 'inlineCode',
+ 'link',
+ 'strong',
+ 'text'
+])
+const MARKDOWN_STRINGIFIER = unified()
+ .use(remarkGfm)
+ .use(remarkStringify, {
+ bullet: '-',
+ fence: '`',
+ fences: true,
+ emphasis: '*',
+ strong: '*',
+ rule: '-',
+ handlers: {
+ text(node: {value: string}) {
+ return node.value
+ }
+ }
+ })
function createExpressionOptions(
ctx: ProcessingContext,
@@ -39,6 +81,284 @@ function simplifyLinkText(text: string): string {
return text.slice(lastSlashIndex + 1)
}
+function isIntrinsicJsxName(name: string | null | undefined): name is string {
+ if (name == null || name === '') return false
+ return INTRINSIC_JSX_NAME_PATTERN.test(name) || name.includes('-')
+}
+
+function isInlineRenderableNode(node: ChildNode): boolean {
+ return INLINE_RENDERABLE_TYPES.has(node.type)
+}
+
+function getNodeSourceSlice(
+ node: PositionedNode,
+ ctx: ProcessingContext
+): string | undefined {
+ const startOffset = node.position?.start.offset
+ const endOffset = node.position?.end?.offset
+
+ if (ctx.sourceText == null || startOffset == null || endOffset == null || startOffset >= endOffset) return void 0
+
+ return ctx.sourceText.slice(startOffset, endOffset)
+}
+
+function applySourceReplacements(
+ sourceSlice: string,
+ startOffset: number,
+ replacements: SourceReplacement[]
+): string {
+ let rendered = sourceSlice
+
+ for (const replacement of [...replacements].sort((left, right) => right.start - left.start)) {
+ const relativeStart = replacement.start - startOffset
+ const relativeEnd = replacement.end - startOffset
+
+ if (relativeStart < 0 || relativeEnd < relativeStart || relativeEnd > rendered.length) continue
+
+ rendered = rendered.slice(0, relativeStart) + replacement.value + rendered.slice(relativeEnd)
+ }
+
+ return rendered
+}
+
+function escapeHtmlAttributeValue(value: string): string {
+ return value
+ .replaceAll('&', '&')
+ .replaceAll('"', '"')
+ .replaceAll('<', '<')
+}
+
+function evaluateExpressionValue(
+ expression: string,
+ ctx: ProcessingContext,
+ node: PositionedNode,
+ nodeType: string
+): unknown {
+ const trimmed = expression.trim()
+ if (trimmed === '') return ''
+
+ const scopeKeys = Object.keys(ctx.scope)
+ const scopeValues = scopeKeys.map(key => ctx.scope[key])
+
+ try {
+ // eslint-disable-next-line ts/no-implied-eval, no-new-func
+ const fn = new Function(...scopeKeys, `return (${trimmed})`) as (...args: unknown[]) => unknown
+ return fn(...scopeValues)
+ }
+ catch (error) {
+ evaluateExpression(expression, ctx.scope, createExpressionOptions(ctx, node, nodeType))
+ throw error
+ }
+}
+
+function stringifyHtmlAttribute(
+ name: string,
+ value: unknown
+): string | null {
+ if (value == null || value === false) return null
+ if (value === true) return name
+
+ const serialized
+ = typeof value === 'string'
+ ? value
+ : typeof value === 'number' || typeof value === 'bigint'
+ ? String(value)
+ : typeof value === 'boolean'
+ ? String(value)
+ : JSON.stringify(value)
+
+ return `${name}="${escapeHtmlAttributeValue(serialized)}"`
+}
+
+function serializeIntrinsicAttributes(
+ attributes: MdxJsxFlowElement['attributes'] | MdxJsxTextElement['attributes'],
+ ctx: ProcessingContext
+): string {
+ const rendered: string[] = []
+
+ for (const attribute of attributes) {
+ if (attribute.type === 'mdxJsxAttribute') {
+ if (attribute.value == null) {
+ rendered.push(attribute.name)
+ continue
+ }
+
+ if (typeof attribute.value === 'string') {
+ rendered.push(`${attribute.name}="${escapeHtmlAttributeValue(attribute.value)}"`)
+ continue
+ }
+
+ const evaluated = evaluateExpressionValue(
+ attribute.value.value,
+ ctx,
+ attribute,
+ 'mdxJsxAttributeValueExpression'
+ )
+ const serialized = stringifyHtmlAttribute(attribute.name, evaluated)
+ if (serialized != null) rendered.push(serialized)
+ continue
+ }
+
+ const spreadExpression = attribute.value.replace(SPREAD_ATTRIBUTE_PREFIX_PATTERN, '').trim()
+ const evaluated = evaluateExpressionValue(
+ spreadExpression,
+ ctx,
+ attribute,
+ 'mdxJsxExpressionAttribute'
+ )
+
+ if (evaluated == null || typeof evaluated !== 'object' || Array.isArray(evaluated)) continue
+
+    for (const [name, value] of Object.entries(evaluated as Record<string, unknown>)) {
+ const serialized = stringifyHtmlAttribute(name, value)
+ if (serialized != null) rendered.push(serialized)
+ }
+ }
+
+ return rendered.length === 0 ? '' : ` ${rendered.join(' ')}`
+}
+
+function stringifyRenderedNodes(nodes: ChildNode[]): string {
+ if (nodes.length === 0) return ''
+
+ const root: Root = nodes.every(isInlineRenderableNode)
+ ? {
+ type: 'root',
+ children: [{
+ type: 'paragraph',
+ children: nodes as Text[]
+ } as RootContent]
+ }
+ : {
+ type: 'root',
+ children: nodes as RootContent[]
+ }
+
+ return MARKDOWN_STRINGIFIER.stringify(root).replace(TRAILING_NEWLINES_PATTERN, '')
+}
+
+async function renderGeneratedNodes(nodes: ChildNode[]): Promise<string> {
+ return stringifyRenderedNodes(nodes)
+}
+
+async function renderSourceAwareNode(
+  node: SourceRenderableNode,
+  ctx: ProcessingContext
+): Promise<string> {
+ if (node.type === 'mdxjsEsm') return ''
+
+ if (node.type === 'mdxFlowExpression' || node.type === 'mdxTextExpression') {
+ const estree = (node.data as {estree?: Program} | undefined)?.estree
+ const trimmedValue = node.value.trim()
+
+ if (trimmedValue.startsWith('/*') && trimmedValue.endsWith('*/')) return ''
+
+ if (hasJsxInEstree(estree)) {
+ const rendered = await evaluateJsxExpression(node, ctx, async (children, c) => {
+ const tempRoot: Root = {type: 'root', children}
+ const processed = await processAst(tempRoot, c)
+ return processed.children
+ })
+
+ return renderGeneratedNodes(rendered as ChildNode[])
+ }
+
+ return evaluateExpression(node.value, ctx.scope, createExpressionOptions(ctx, node, node.type))
+ }
+
+ if (node.type === 'mdxJsxFlowElement' || node.type === 'mdxJsxTextElement') {
+ if (node.name != null && isMdxComponent(node.name, ctx)) {
+ const rendered = await processComponent(node, ctx, processAst)
+ return renderGeneratedNodes(rendered as ChildNode[])
+ }
+
+ if (isIntrinsicJsxName(node.name)) return renderIntrinsicElement(node, ctx)
+
+ const converted = convertJsxToMarkdown(node, ctx)
+ if (converted != null) return renderGeneratedNodes(converted as ChildNode[])
+
+ return ''
+ }
+
+ const sourceSlice = getNodeSourceSlice(node, ctx)
+
+ if (!('children' in node) || !Array.isArray(node.children) || node.children.length === 0) {
+ if (sourceSlice != null) return sourceSlice
+ return renderGeneratedNodes([node])
+ }
+
+ if (sourceSlice == null) return renderGeneratedNodes([node])
+
+ const startOffset = node.position?.start.offset
+ if (startOffset == null) return sourceSlice
+
+ const replacements: SourceReplacement[] = []
+ for (const child of node.children as SourceRenderableNode[]) {
+ const childStart = child.position?.start.offset
+ const childEnd = child.position?.end?.offset
+
+ if (childStart == null || childEnd == null || childStart > childEnd) continue
+
+ replacements.push({
+ start: childStart,
+ end: childEnd,
+ value: await renderSourceAwareNode(child, ctx)
+ })
+ }
+
+ return applySourceReplacements(sourceSlice, startOffset, replacements)
+}
+
+function isSelfClosingIntrinsicElement(
+ element: MdxJsxFlowElement | MdxJsxTextElement,
+ ctx: ProcessingContext
+): boolean {
+ return getNodeSourceSlice(element, ctx)?.trimEnd().endsWith('/>') ?? false
+}
+
+async function renderIntrinsicElement(
+  element: MdxJsxFlowElement | MdxJsxTextElement,
+  ctx: ProcessingContext
+): Promise<string> {
+ if (!isIntrinsicJsxName(element.name)) return ''
+
+ const attributes = serializeIntrinsicAttributes(element.attributes, ctx)
+ const renderedChildren = await Promise.all(
+ element.children.map(async child => renderSourceAwareNode(child as SourceRenderableNode, ctx))
+ )
+ const content = renderedChildren.join('')
+
+ if (content === '' && isSelfClosingIntrinsicElement(element, ctx)) return `<${element.name}${attributes} />`
+
+ return `<${element.name}${attributes}>${content}${element.name}>`
+}
+
+async function preserveIntrinsicFlowElement(
+  element: MdxJsxFlowElement,
+  ctx: ProcessingContext
+): Promise<RootContent[]> {
+ const rendered = await renderIntrinsicElement(element, ctx)
+ if (rendered === '') return []
+
+ return [{
+ type: 'html',
+ value: rendered
+ } as RootContent]
+}
+
+async function preserveIntrinsicTextElement(
+  element: MdxJsxTextElement,
+  ctx: ProcessingContext
+): Promise<Text[]> {
+ const rendered = await renderIntrinsicElement(element, ctx)
+ if (rendered === '') return []
+
+ return [{
+ type: 'text',
+ value: rendered
+ }]
+}
+
/**
* Processes an MDX AST, evaluating expressions and expanding components.
* Import statements (mdxjsEsm nodes) are skipped during transformation.
@@ -133,6 +453,8 @@ async function transformJsxElement(
const converted = convertJsxToMarkdown(element, ctx)
if (converted != null) return converted
+ if (isIntrinsicJsxName(element.name)) return preserveIntrinsicFlowElement(element, ctx)
+
return []
}
@@ -196,6 +518,11 @@ async function transformChildren(
if (node.type === 'paragraph' && 'children' in node) result.push(...node.children)
else result.push(node as ChildNode)
}
+ continue
+ }
+
+ if (isIntrinsicJsxName(textElement.name)) {
+ result.push(...await preserveIntrinsicTextElement(textElement, ctx))
}
continue
}
diff --git a/libraries/md-compiler/src/lib.rs b/libraries/md-compiler/src/lib.rs
index 648e86f4..6361d762 100644
--- a/libraries/md-compiler/src/lib.rs
+++ b/libraries/md-compiler/src/lib.rs
@@ -27,8 +27,6 @@ pub use transformer::ProcessingContext;
#[cfg(feature = "napi")]
mod napi_binding {
- use std::collections::HashMap;
-
use super::{
EvaluationScope, MdxGlobalScope, MdxToMdOptions, mdx_to_md, mdx_to_md_with_metadata,
};
diff --git a/libraries/md-compiler/src/markdown/index.ts b/libraries/md-compiler/src/markdown/index.ts
index f35b0949..8f407c0c 100644
--- a/libraries/md-compiler/src/markdown/index.ts
+++ b/libraries/md-compiler/src/markdown/index.ts
@@ -6,6 +6,7 @@ import process from 'node:process'
import * as YAML from 'yaml'
import {parseMdx} from '../compiler/parser' // Napi binding types
+import {shouldSkipNativeBinding} from '../native-binding'
interface NapiMdCompilerModule {
buildFrontMatter: (frontMatterJson: string) => string
@@ -74,23 +75,25 @@ function loadBindingFromCliBinaryPackage(
}
try {
- const require = createRequire(import.meta.url)
- const {platform, arch} = process
- const platforms: Record<string, [string, string]> = {
- 'win32-x64': ['napi-md-compiler.win32-x64-msvc', 'win32-x64-msvc'],
- 'linux-x64': ['napi-md-compiler.linux-x64-gnu', 'linux-x64-gnu'],
- 'linux-arm64': ['napi-md-compiler.linux-arm64-gnu', 'linux-arm64-gnu'],
- 'darwin-arm64': ['napi-md-compiler.darwin-arm64', 'darwin-arm64'],
- 'darwin-x64': ['napi-md-compiler.darwin-x64', 'darwin-x64']
- }
- const entry = platforms[`${platform}-${arch}`]
- if (entry != null) {
- const [local, suffix] = entry
- try {
- napiBinding = require(`../${local}.node`) as NapiMdCompilerModule
+ if (!shouldSkipNativeBinding()) {
+ const require = createRequire(import.meta.url)
+ const {platform, arch} = process
+ const platforms: Record<string, [string, string]> = {
+ 'win32-x64': ['napi-md-compiler.win32-x64-msvc', 'win32-x64-msvc'],
+ 'linux-x64': ['napi-md-compiler.linux-x64-gnu', 'linux-x64-gnu'],
+ 'linux-arm64': ['napi-md-compiler.linux-arm64-gnu', 'linux-arm64-gnu'],
+ 'darwin-arm64': ['napi-md-compiler.darwin-arm64', 'darwin-arm64'],
+ 'darwin-x64': ['napi-md-compiler.darwin-x64', 'darwin-x64']
}
- catch {
- napiBinding = loadBindingFromCliBinaryPackage(require, suffix)
+ const entry = platforms[`${platform}-${arch}`]
+ if (entry != null) {
+ const [local, suffix] = entry
+ try {
+ napiBinding = require(`../${local}.node`) as NapiMdCompilerModule
+ }
+ catch {
+ napiBinding = loadBindingFromCliBinaryPackage(require, suffix)
+ }
}
}
}
diff --git a/libraries/md-compiler/src/mdx-to-md.test.ts b/libraries/md-compiler/src/mdx-to-md.test.ts
index ec722033..e107fe20 100644
--- a/libraries/md-compiler/src/mdx-to-md.test.ts
+++ b/libraries/md-compiler/src/mdx-to-md.test.ts
@@ -24,4 +24,39 @@ This is content.`
keywords: ['test', 'default']
})
})
+
+ it('serializes URL-labeled links as a single valid link shape', async () => {
+ const content = 'Open [http://localhost:9002](http://localhost:9002) in your browser.'
+
+ const result = await mdxToMd(content)
+
+ expect(result).not.toContain('[[')
+ expect(result).not.toContain(']](')
+ expect(result).toBe('Open in your browser.')
+ })
+
+ it('keeps non-URL self-labeled links in bracketed form', async () => {
+ const result = await mdxToMd('[README](README) and [#section](#section)')
+
+ expect(result).toBe('[README](README) and [#section](#section)')
+ })
+
+ it('keeps formatted URL labels instead of collapsing them into autolinks', async () => {
+ const result = await mdxToMd('[**http://localhost:9002**](http://localhost:9002)')
+
+ expect(result).toBe('[**http://localhost:9002**](http://localhost:9002)')
+ })
+
+ it('evaluates preserved intrinsic HTML before serializing', async () => {
+ const result = await mdxToMd('{count}
', {
+ scope: {
+ side: 'right',
+ count: 2,
+ logo: './logo.svg',
+ width: 138
+ }
+ })
+
+ expect(result).toBe('2
')
+ })
})
diff --git a/libraries/md-compiler/src/mdx-to-md.ts b/libraries/md-compiler/src/mdx-to-md.ts
index f62569ba..536f509d 100644
--- a/libraries/md-compiler/src/mdx-to-md.ts
+++ b/libraries/md-compiler/src/mdx-to-md.ts
@@ -5,6 +5,7 @@ import {createRequire} from 'node:module'
import {dirname, join} from 'node:path'
import process from 'node:process'
import {mdxToMd as fallbackMdxToMd} from './compiler/mdx-to-md'
+import {shouldSkipNativeBinding} from './native-binding'
interface NapiMdCompilerModule {
compileMdxToMd: (content: string, optionsJson?: string | null) => string
@@ -70,24 +71,42 @@ function loadBindingFromCliBinaryPackage(
return null
}
-try {
- const require = createRequire(import.meta.url)
- const {platform, arch} = process
- const platforms: Record<string, [string, string]> = {
- 'win32-x64': ['napi-md-compiler.win32-x64-msvc', 'win32-x64-msvc'],
- 'linux-x64': ['napi-md-compiler.linux-x64-gnu', 'linux-x64-gnu'],
- 'linux-arm64': ['napi-md-compiler.linux-arm64-gnu', 'linux-arm64-gnu'],
- 'darwin-arm64': ['napi-md-compiler.darwin-arm64', 'darwin-arm64'],
- 'darwin-x64': ['napi-md-compiler.darwin-x64', 'darwin-x64']
- }
- const entry = platforms[`${platform}-${arch}`]
- if (entry != null) {
- const [local, suffix] = entry
+function loadLocalBinding(
+  requireFn: ReturnType<typeof createRequire>,
+ local: string
+): NapiMdCompilerModule | null {
+ const candidates = [
+ `./${local}.node`,
+ `../dist/${local}.node`
+ ]
+
+ for (const candidate of candidates) {
try {
- napiBinding = require(`./${local}.node`) as NapiMdCompilerModule
+ const binding = requireFn(candidate) as unknown
+ if (isNapiMdCompilerModule(binding)) return binding
}
catch {
- napiBinding = loadBindingFromCliBinaryPackage(require, suffix)
+ }
+ }
+
+ return null
+}
+
+try {
+ if (!shouldSkipNativeBinding()) {
+ const require = createRequire(import.meta.url)
+ const {platform, arch} = process
+ const platforms: Record<string, [string, string]> = {
+ 'win32-x64': ['napi-md-compiler.win32-x64-msvc', 'win32-x64-msvc'],
+ 'linux-x64': ['napi-md-compiler.linux-x64-gnu', 'linux-x64-gnu'],
+ 'linux-arm64': ['napi-md-compiler.linux-arm64-gnu', 'linux-arm64-gnu'],
+ 'darwin-arm64': ['napi-md-compiler.darwin-arm64', 'darwin-arm64'],
+ 'darwin-x64': ['napi-md-compiler.darwin-x64', 'darwin-x64']
+ }
+ const entry = platforms[`${platform}-${arch}`]
+ if (entry != null) {
+ const [local, suffix] = entry
+ napiBinding = loadLocalBinding(require, local) ?? loadBindingFromCliBinaryPackage(require, suffix)
}
}
}
diff --git a/libraries/md-compiler/src/mdx_to_md.rs b/libraries/md-compiler/src/mdx_to_md.rs
index b056f877..dabdfeef 100644
--- a/libraries/md-compiler/src/mdx_to_md.rs
+++ b/libraries/md-compiler/src/mdx_to_md.rs
@@ -221,7 +221,7 @@ pub fn mdx_to_md(content: &str, options: Option) -> Result\n
\n
\n";
+ let result = mdx_to_md(source, None).unwrap();
+
+ assert!(result.contains(" in your browser.");
+ }
+
+ #[test]
+ fn test_non_url_self_labeled_links_remain_bracketed() {
+ let source = "[README](README) and [#section](#section)\n";
+ let result = mdx_to_md(source, None).unwrap();
+
+ assert_eq!(result, "[README](README) and [#section](#section)");
+ }
+
+ #[test]
+ fn test_formatted_url_labels_do_not_collapse_to_autolinks() {
+ let source = "[**http://localhost:9002**](http://localhost:9002)\n";
+ let result = mdx_to_md(source, None).unwrap();
+
+ assert_eq!(result, "[**http://localhost:9002**](http://localhost:9002)");
+ }
+
+ #[test]
+ fn test_preserved_intrinsic_html_evaluates_children_and_attributes() {
+ let source = "{count}
\n";
+ let mut scope = EvaluationScope::new();
+ scope.insert("side".into(), json!("right"));
+ scope.insert("count".into(), json!(2));
+ scope.insert("logo".into(), json!("./logo.svg"));
+ scope.insert("width".into(), json!(138));
+
+ let result = mdx_to_md(
+ source,
+ Some(MdxToMdOptions {
+ scope: Some(scope),
+ ..Default::default()
+ }),
+ )
+ .unwrap();
+
+ assert_eq!(
+ result,
+ "2
"
+ );
+ }
+
+ #[test]
+ fn test_preserves_opening_sample_section() {
+ let source = "\n
\n
\n\n# China Unemployment Watch\n\n\n English | 简体中文\n
\n";
+ let result = mdx_to_md(source, None).unwrap();
+
+ assert!(result.contains(""), "Got: {}", result);
+ assert!(
+ result.contains("# China Unemployment Watch"),
+ "Got: {}",
+ result
+ );
+ assert!(result.contains("
"), "Got: {}", result);
+ }
}
diff --git a/libraries/md-compiler/src/native-binding.ts b/libraries/md-compiler/src/native-binding.ts
new file mode 100644
index 00000000..f8732cc9
--- /dev/null
+++ b/libraries/md-compiler/src/native-binding.ts
@@ -0,0 +1,10 @@
+import process from 'node:process'
+
+export function shouldSkipNativeBinding(): boolean {
+ if (process.env['TNMSC_FORCE_NATIVE_BINDING'] === '1') return false
+ if (process.env['TNMSC_DISABLE_NATIVE_BINDING'] === '1') return true
+
+ return process.env['NODE_ENV'] === 'test'
+ || process.env['VITEST'] != null
+ || process.env['VITEST_WORKER_ID'] != null
+}
diff --git a/libraries/md-compiler/src/serializer.rs b/libraries/md-compiler/src/serializer.rs
index 8d2fa4c8..f26004a5 100644
--- a/libraries/md-compiler/src/serializer.rs
+++ b/libraries/md-compiler/src/serializer.rs
@@ -27,6 +27,8 @@ struct SerializeContext {
list_depth: usize,
/// Whether we're inside a tight list
tight: bool,
+ /// Whether the current serialization target is a link label
+ in_link_label: bool,
}
fn serialize_node(node: &Node, out: &mut String, ctx: &SerializeContext) {
@@ -79,8 +81,28 @@ fn serialize_node(node: &Node, out: &mut String, ctx: &SerializeContext) {
out.push_str("\n```\n\n");
}
Node::Link(link) => {
+ if ctx.in_link_label {
+ let plain_label = collect_plain_text(&link.children);
+ if plain_label.is_empty() {
+ out.push_str(&link.url);
+ } else {
+ out.push_str(&plain_label);
+ }
+ return;
+ }
+
+ if link.title.is_none()
+ && is_bare_autolink_label(&link.children, &link.url)
+ && is_autolink_destination(&link.url)
+ {
+ out.push('<');
+ out.push_str(&link.url);
+ out.push('>');
+ return;
+ }
+
out.push('[');
- serialize_inline_children(&link.children, out, ctx);
+ serialize_link_label_children(&link.children, out, ctx);
out.push_str("](");
out.push_str(&link.url);
if let Some(title) = &link.title {
@@ -106,6 +128,7 @@ fn serialize_node(node: &Node, out: &mut String, ctx: &SerializeContext) {
let child_ctx = SerializeContext {
list_depth: ctx.list_depth + 1,
tight: !list.spread,
+ in_link_label: ctx.in_link_label,
};
for (i, child) in list.children.iter().enumerate() {
if let Node::ListItem(item) = child {
@@ -225,6 +248,17 @@ fn serialize_inline_children(children: &[Node], out: &mut String, ctx: &Serializ
}
}
+fn serialize_link_label_children(children: &[Node], out: &mut String, ctx: &SerializeContext) {
+ let label_ctx = SerializeContext {
+ in_link_label: true,
+ ..ctx.clone()
+ };
+
+ for child in children {
+ serialize_node(child, out, &label_ctx);
+ }
+}
+
fn serialize_list_item_children(children: &[Node], out: &mut String, ctx: &SerializeContext) {
for (i, child) in children.iter().enumerate() {
match child {
@@ -301,6 +335,90 @@ fn serialize_table(table: &markdown::mdast::Table, out: &mut String) {
out.push('\n');
}
+fn is_autolink_destination(url: &str) -> bool {
+ let normalized = url.trim();
+ if normalized.is_empty()
+ || normalized.contains(char::is_whitespace)
+ || normalized.contains('<')
+ || normalized.contains('>')
+ {
+ return false;
+ }
+
+ let lower = normalized.to_ascii_lowercase();
+ lower.starts_with("http://")
+ || lower.starts_with("https://")
+ || lower.starts_with("ftp://")
+ || lower.starts_with("mailto:")
+ || looks_like_email_address(normalized)
+}
+
+fn is_bare_autolink_label(children: &[Node], url: &str) -> bool {
+ match children {
+ [Node::Text(text)] => text.value == url,
+ [Node::Link(link)] => {
+ link.title.is_none() && link.url == url && collect_plain_text(&link.children) == url
+ }
+ _ => false,
+ }
+}
+
+fn looks_like_email_address(value: &str) -> bool {
+ let mut parts = value.split('@');
+ let local_part = parts.next().unwrap_or_default();
+ let domain = parts.next().unwrap_or_default();
+
+ parts.next().is_none()
+ && !local_part.is_empty()
+ && domain.contains('.')
+ && !domain.starts_with('.')
+ && !domain.ends_with('.')
+}
+
+fn collect_plain_text(children: &[Node]) -> String {
+ let mut output = String::new();
+
+ for child in children {
+ collect_node_plain_text(child, &mut output);
+ }
+
+ output
+}
+
+fn collect_node_plain_text(node: &Node, out: &mut String) {
+ match node {
+ Node::Text(text) => out.push_str(&text.value),
+ Node::InlineCode(code) => out.push_str(&code.value),
+ Node::Image(image) => out.push_str(&image.alt),
+ Node::Break(_) => out.push(' '),
+ Node::Html(html) => out.push_str(&html.value),
+ Node::Link(link) => {
+ let label = collect_plain_text(&link.children);
+ if label.is_empty() {
+ out.push_str(&link.url);
+ } else {
+ out.push_str(&label);
+ }
+ }
+ Node::Strong(strong) => {
+ for child in &strong.children {
+ collect_node_plain_text(child, out);
+ }
+ }
+ Node::Emphasis(emphasis) => {
+ for child in &emphasis.children {
+ collect_node_plain_text(child, out);
+ }
+ }
+ Node::Delete(delete) => {
+ for child in &delete.children {
+ collect_node_plain_text(child, out);
+ }
+ }
+ _ => {}
+ }
+}
+
#[cfg(test)]
mod tests {
use super::*;
diff --git a/libraries/md-compiler/src/transformer.rs b/libraries/md-compiler/src/transformer.rs
index 36cb7071..897770a8 100644
--- a/libraries/md-compiler/src/transformer.rs
+++ b/libraries/md-compiler/src/transformer.rs
@@ -4,7 +4,9 @@
//! and converting JSX elements to Markdown equivalents.
use crate::expression_eval::{EvaluationScope, evaluate_expression};
+use crate::serializer::serialize;
use markdown::mdast::*;
+use serde_json::{Number, Value};
use std::collections::HashMap;
// ---------------------------------------------------------------------------
@@ -21,15 +23,17 @@ pub struct ProcessingContext {
pub components: HashMap,
pub processing_stack: Vec,
pub base_path: Option,
+ pub source_text: Option<String>,
}
impl ProcessingContext {
- pub fn new(scope: EvaluationScope) -> Self {
+ pub fn new(scope: EvaluationScope, source_text: Option<String>) -> Self {
let mut ctx = Self {
scope,
components: HashMap::new(),
processing_stack: Vec::new(),
base_path: None,
+ source_text,
};
register_built_in_components(&mut ctx);
ctx
@@ -117,6 +121,440 @@ fn is_truthy(s: &str) -> bool {
!s.is_empty() && s != "false" && s != "0" && s != "undefined" && s != "null"
}
+fn is_intrinsic_jsx_name(name: &str) -> bool {
+ name.chars()
+ .next()
+ .is_some_and(|character| character.is_ascii_lowercase())
+ || name.contains('-')
+}
+
+#[derive(Debug)]
+struct SourceReplacement {
+ start: usize,
+ end: usize,
+ value: String,
+}
+
+fn get_source_slice(
+ position: Option<&markdown::unist::Position>,
+ source_text: Option<&str>,
+) -> Option<String> {
+ let position = position?;
+ let source_text = source_text?;
+
+ if position.start.offset >= position.end.offset {
+ return None;
+ }
+
+ source_text
+ .get(position.start.offset..position.end.offset)
+ .map(ToString::to_string)
+}
+
+fn is_block_serializable_node(node: &Node) -> bool {
+ matches!(
+ node,
+ Node::Root(_)
+ | Node::Yaml(_)
+ | Node::Heading(_)
+ | Node::Paragraph(_)
+ | Node::Code(_)
+ | Node::List(_)
+ | Node::ListItem(_)
+ | Node::Blockquote(_)
+ | Node::ThematicBreak(_)
+ | Node::Table(_)
+ | Node::Definition(_)
+ | Node::FootnoteDefinition(_)
+ | Node::MdxFlowExpression(_)
+ | Node::MdxjsEsm(_)
+ | Node::Toml(_)
+ | Node::Math(_)
+ | Node::MdxJsxFlowElement(_)
+ )
+}
+
+fn serialize_generated_nodes(nodes: &[Node]) -> String {
+ if nodes.is_empty() {
+ return String::new();
+ }
+
+ let root = if nodes.iter().any(is_block_serializable_node) {
+ Root {
+ children: nodes.to_vec(),
+ position: None,
+ }
+ } else {
+ Root {
+ children: vec![Node::Paragraph(Paragraph {
+ children: nodes.to_vec(),
+ position: None,
+ })],
+ position: None,
+ }
+ };
+
+ serialize(&Node::Root(root))
+}
+
+fn apply_source_replacements(
+ source_slice: &str,
+ start_offset: usize,
+    mut replacements: Vec<SourceReplacement>,
+) -> String {
+ replacements.sort_by(|left, right| right.start.cmp(&left.start));
+
+ let mut rendered = source_slice.to_string();
+ for replacement in replacements {
+ let relative_start = replacement.start.saturating_sub(start_offset);
+ let relative_end = replacement.end.saturating_sub(start_offset);
+
+ if relative_start > relative_end || relative_end > rendered.len() {
+ continue;
+ }
+
+ rendered.replace_range(relative_start..relative_end, &replacement.value);
+ }
+
+ rendered
+}
+
+fn escape_html_attribute_value(value: &str) -> String {
+    value
+        .replace('&', "&amp;")
+        .replace('"', "&quot;")
+        .replace('<', "&lt;")
+}
+
+fn looks_like_simple_reference(expression: &str) -> bool {
+ let mut chars = expression.chars().peekable();
+
+ match chars.peek() {
+ Some(c) if c.is_ascii_alphabetic() || *c == '_' || *c == '$' => {
+ chars.next();
+ }
+ _ => return false,
+ }
+
+ chars.all(|character| {
+ character.is_ascii_alphanumeric()
+ || character == '_'
+ || character == '$'
+ || character == '.'
+ })
+}
+
+fn resolve_reference_value(reference: &str, scope: &EvaluationScope) -> Option<Value> {
+ let mut parts = reference.split('.');
+ let root_name = parts.next()?;
+ let mut value = scope.get(root_name)?.clone();
+
+ for part in parts {
+ let Value::Object(map) = &value else {
+ return None;
+ };
+ value = map.get(part)?.clone();
+ }
+
+ Some(value)
+}
+
+fn parse_expression_literal_value(expression: &str) -> Option<Value> {
+ if ((expression.starts_with('"') && expression.ends_with('"'))
+ || (expression.starts_with('\'') && expression.ends_with('\'')))
+ && expression.len() >= 2
+ {
+ return Some(Value::String(
+ expression[1..expression.len() - 1].to_string(),
+ ));
+ }
+
+ match expression {
+ "true" => return Some(Value::Bool(true)),
+ "false" => return Some(Value::Bool(false)),
+ "null" | "undefined" => return Some(Value::Null),
+ _ => {}
+ }
+
+    if let Ok(number) = expression.parse::<i64>() {
+ return Some(Value::Number(number.into()));
+ }
+
+    if let Ok(number) = expression.parse::<u64>() {
+ return Some(Value::Number(number.into()));
+ }
+
+    if let Ok(number) = expression.parse::<f64>() {
+ if let Some(number) = Number::from_f64(number) {
+ return Some(Value::Number(number));
+ }
+ }
+
+ None
+}
+
+fn evaluate_attribute_expression_value(expression: &str, scope: &EvaluationScope) -> Option<Value> {
+ let trimmed = expression.trim();
+ if trimmed.is_empty() {
+ return Some(Value::String(String::new()));
+ }
+
+ if let Some(literal) = parse_expression_literal_value(trimmed) {
+ return Some(literal);
+ }
+
+ if looks_like_simple_reference(trimmed) {
+ return resolve_reference_value(trimmed, scope);
+ }
+
+ let rendered = evaluate_expression(trimmed, scope).ok()?;
+ if rendered.is_empty() {
+ return Some(Value::String(rendered));
+ }
+
+    serde_json::from_str::<Value>(&rendered)
+ .ok()
+ .or_else(|| Some(Value::String(rendered)))
+}
+
+fn stringify_html_attribute(name: &str, value: &Value) -> Option<String> {
+ match value {
+ Value::Null => None,
+ Value::Bool(true) => Some(name.to_string()),
+ Value::Bool(false) => None,
+ Value::String(value) => Some(format!(
+ r#"{name}="{}""#,
+ escape_html_attribute_value(value)
+ )),
+ Value::Number(value) => Some(format!(
+ r#"{name}="{}""#,
+ escape_html_attribute_value(&value.to_string())
+ )),
+ Value::Array(_) | Value::Object(_) => {
+ let serialized = serde_json::to_string(value).ok()?;
+ Some(format!(
+ r#"{name}="{}""#,
+ escape_html_attribute_value(&serialized)
+ ))
+ }
+ }
+}
+
+fn serialize_intrinsic_attributes(
+ attributes: &[AttributeContent],
+ scope: &EvaluationScope,
+) -> String {
+ let mut rendered = Vec::new();
+
+ for attribute in attributes {
+ match attribute {
+ AttributeContent::Property(property) => match &property.value {
+ None => rendered.push(property.name.clone()),
+ Some(AttributeValue::Literal(value)) => rendered.push(format!(
+ r#"{}="{}""#,
+ property.name,
+ escape_html_attribute_value(value)
+ )),
+ Some(AttributeValue::Expression(expression)) => {
+ let Some(value) = evaluate_attribute_expression_value(&expression.value, scope)
+ else {
+ continue;
+ };
+
+ if let Some(serialized) = stringify_html_attribute(&property.name, &value) {
+ rendered.push(serialized);
+ }
+ }
+ },
+ AttributeContent::Expression(expression) => {
+ let spread_expression = expression.value.trim_start_matches("...").trim();
+ let Some(Value::Object(map)) =
+ evaluate_attribute_expression_value(spread_expression, scope)
+ else {
+ continue;
+ };
+
+ for (name, value) in map {
+ if let Some(serialized) = stringify_html_attribute(&name, &value) {
+ rendered.push(serialized);
+ }
+ }
+ }
+ }
+ }
+
+ if rendered.is_empty() {
+ String::new()
+ } else {
+ format!(" {}", rendered.join(" "))
+ }
+}
+
+fn is_self_closing_intrinsic_element(
+ position: Option<&markdown::unist::Position>,
+ source_text: Option<&str>,
+) -> bool {
+ get_source_slice(position, source_text).is_some_and(|source| source.trim_end().ends_with("/>"))
+}
+
+fn render_source_aware_node(node: &Node, ctx: &ProcessingContext) -> String {
+ match node {
+ Node::MdxjsEsm(_) => String::new(),
+ Node::MdxFlowExpression(expression) => {
+ let trimmed = expression.value.trim();
+ if trimmed.starts_with("/*") && trimmed.ends_with("*/") {
+ return String::new();
+ }
+ evaluate_expression(&expression.value, &ctx.scope).unwrap_or_default()
+ }
+ Node::MdxTextExpression(expression) => {
+ let trimmed = expression.value.trim();
+ if trimmed.starts_with("/*") && trimmed.ends_with("*/") {
+ return String::new();
+ }
+ evaluate_expression(&expression.value, &ctx.scope).unwrap_or_default()
+ }
+ Node::MdxJsxFlowElement(element) => {
+ let name = element.name.as_deref().unwrap_or_default();
+ if let Some(handler) = ctx.components.get(name) {
+ return serialize_generated_nodes(&handler(element, ctx));
+ }
+ if is_intrinsic_jsx_name(name) {
+ return render_intrinsic_element(
+ name,
+ &element.attributes,
+ &element.children,
+ element.position.as_ref(),
+ ctx,
+ );
+ }
+ convert_jsx_to_markdown(element, ctx)
+ .map(|nodes| serialize_generated_nodes(&nodes))
+ .unwrap_or_default()
+ }
+ Node::MdxJsxTextElement(element) => {
+ let name = element.name.as_deref().unwrap_or_default();
+ if name == "Md.Line" {
+ if evaluate_when_condition_text(element, ctx) {
+ return extract_text_content(&element.children, &ctx.scope);
+ }
+ return String::new();
+ }
+ if name == "Md" {
+ if evaluate_when_condition_text(element, ctx) {
+ return serialize_generated_nodes(&transform_inline_children(
+ &element.children,
+ ctx,
+ ));
+ }
+ return String::new();
+ }
+ if is_intrinsic_jsx_name(name) {
+ return render_intrinsic_element(
+ name,
+ &element.attributes,
+ &element.children,
+ element.position.as_ref(),
+ ctx,
+ );
+ }
+ convert_jsx_text_to_markdown(element, ctx)
+ .map(|nodes| serialize_generated_nodes(&nodes))
+ .unwrap_or_default()
+ }
+ _ => {
+ let source_slice = get_source_slice(node.position(), ctx.source_text.as_deref());
+ let Some(children) = node.children() else {
+ return source_slice.unwrap_or_else(|| serialize_generated_nodes(&[node.clone()]));
+ };
+
+ if children.is_empty() {
+ return source_slice.unwrap_or_else(|| serialize_generated_nodes(&[node.clone()]));
+ }
+
+ let Some(source_slice) = source_slice else {
+ return serialize_generated_nodes(&[node.clone()]);
+ };
+ let Some(start_offset) = node.position().map(|position| position.start.offset) else {
+ return source_slice;
+ };
+
+ let replacements = children
+ .iter()
+ .filter_map(|child| {
+ let position = child.position()?;
+ Some(SourceReplacement {
+ start: position.start.offset,
+ end: position.end.offset,
+ value: render_source_aware_node(child, ctx),
+ })
+ })
+ .collect();
+
+ apply_source_replacements(&source_slice, start_offset, replacements)
+ }
+ }
+}
+
+fn render_intrinsic_element(
+ name: &str,
+ attributes: &[AttributeContent],
+ children: &[Node],
+ position: Option<&markdown::unist::Position>,
+ ctx: &ProcessingContext,
+) -> String {
+ let attributes = serialize_intrinsic_attributes(attributes, &ctx.scope);
+ let content = children
+ .iter()
+ .map(|child| render_source_aware_node(child, ctx))
+        .collect::<String>();
+
+ if content.is_empty() && is_self_closing_intrinsic_element(position, ctx.source_text.as_deref())
+ {
+ return format!("<{name}{attributes} />");
+ }
+
+ format!("<{name}{attributes}>{content}{name}>")
+}
+
+fn preserve_intrinsic_flow_element(
+ element: &MdxJsxFlowElement,
+ ctx: &ProcessingContext,
+) -> Option<Vec<Node>> {
+ let name = element.name.as_deref()?;
+ let rendered = render_intrinsic_element(
+ name,
+ &element.attributes,
+ &element.children,
+ element.position.as_ref(),
+ ctx,
+ );
+
+ Some(vec![Node::Html(Html {
+ value: rendered,
+ position: element.position.clone(),
+ })])
+}
+
+fn preserve_intrinsic_text_element(
+ element: &MdxJsxTextElement,
+ ctx: &ProcessingContext,
+) -> Option<Vec<Node>> {
+ let name = element.name.as_deref()?;
+ let rendered = render_intrinsic_element(
+ name,
+ &element.attributes,
+ &element.children,
+ element.position.as_ref(),
+ ctx,
+ );
+
+ Some(vec![Node::Text(Text {
+ value: rendered,
+ position: element.position.clone(),
+ })])
+}
+
/// Extract text content from child nodes, evaluating expressions.
fn extract_text_content(children: &[Node], scope: &EvaluationScope) -> String {
let mut result = String::new();
@@ -431,6 +869,10 @@ fn transform_children(children: &[Node], ctx: &ProcessingContext) -> Vec {
result.extend(transform_children(&nodes, ctx));
} else if let Some(converted) = convert_jsx_to_markdown(element, ctx) {
result.extend(converted);
+ } else if is_intrinsic_jsx_name(name) {
+ if let Some(preserved) = preserve_intrinsic_flow_element(element, ctx) {
+ result.extend(preserved);
+ }
}
// Unknown JSX elements are silently skipped
}
@@ -629,6 +1071,10 @@ fn transform_inline_children(children: &[Node], ctx: &ProcessingContext) -> Vec<
}
} else if let Some(converted) = convert_jsx_text_to_markdown(element, ctx) {
result.extend(converted);
+ } else if is_intrinsic_jsx_name(name) {
+ if let Some(preserved) = preserve_intrinsic_text_element(element, ctx) {
+ result.extend(preserved);
+ }
}
// Unknown inline JSX elements are silently skipped
}
@@ -715,7 +1161,7 @@ mod tests {
fn compile(source: &str, scope: EvaluationScope) -> String {
let ast = parse_mdx(source).unwrap();
- let ctx = ProcessingContext::new(scope);
+ let ctx = ProcessingContext::new(scope, Some(source.to_string()));
let transformed = transform_ast(&ast, &ctx);
serialize(&transformed)
}
From ffa274278c1f191ad3cbda24ed4ccf6b5d6eef41 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?=
Date: Mon, 30 Mar 2026 08:30:23 +0800
Subject: [PATCH 2/3] Rename workspace wording to project in docs
---
cli/scripts/benchmark-cleanup.ts | 9 +-
cli/scripts/cleanup-native-smoke.ts | 30 +-
cli/src/cleanup/empty-directories.ts | 97 ++++
cli/src/commands/CleanupUtils.adapter.test.ts | 9 +-
cli/src/commands/CleanupUtils.fallback.ts | 508 ------------------
cli/src/commands/CleanupUtils.test.ts | 124 ++++-
cli/src/commands/CleanupUtils.ts | 246 +++++----
.../ProtectedDeletionCommands.test.ts | 45 ++
cli/src/core/cleanup.rs | 263 ++++++++-
cli/src/core/desk-paths.ts | 122 +++--
cli/src/core/desk_paths.rs | 106 ++++
cli/src/core/native-binding.ts | 8 +
cli/src/plugins/desk-paths.test.ts | 112 ++--
cli/src/plugins/plugin-core/filters.ts | 102 +---
cli/test/native-binding/cleanup.ts | 499 +++++++++++++++++
.../native-binding/desk-paths.ts} | 36 +-
cli/test/setup-native-binding.ts | 215 ++++++++
cli/tsconfig.test.json | 1 +
cli/vitest.config.ts | 1 +
doc/app/home-page.mdx | 2 +-
doc/content/cli/_meta.ts | 2 +-
doc/content/cli/cli-commands.mdx | 4 +-
doc/content/cli/dry-run-and-clean.mdx | 4 +
doc/content/cli/index.mdx | 8 +-
doc/content/cli/schema.mdx | 2 +-
doc/content/cli/troubleshooting.mdx | 2 +-
doc/content/cli/workspace-setup.mdx | 10 +-
doc/content/index.mdx | 4 +-
doc/content/mcp/index.mdx | 2 +-
doc/content/mcp/server-tools.mdx | 4 +-
doc/content/technical-details/_meta.ts | 2 +-
.../global-and-workspace-prompts.mdx | 8 +-
doc/content/technical-details/index.mdx | 2 +-
doc/content/technical-details/pipeline.mdx | 2 +-
.../technical-details/source-of-truth.mdx | 2 +-
doc/lib/site.ts | 8 +-
36 files changed, 1721 insertions(+), 880 deletions(-)
create mode 100644 cli/src/cleanup/empty-directories.ts
delete mode 100644 cli/src/commands/CleanupUtils.fallback.ts
create mode 100644 cli/test/native-binding/cleanup.ts
rename cli/{src/core/desk-paths-fallback.ts => test/native-binding/desk-paths.ts} (88%)
create mode 100644 cli/test/setup-native-binding.ts
diff --git a/cli/scripts/benchmark-cleanup.ts b/cli/scripts/benchmark-cleanup.ts
index 7037235b..9c0cb13f 100644
--- a/cli/scripts/benchmark-cleanup.ts
+++ b/cli/scripts/benchmark-cleanup.ts
@@ -10,7 +10,6 @@ delete process.env['VITEST']
delete process.env['VITEST_WORKER_ID']
const cleanupModule = await import('../src/commands/CleanupUtils')
-const fallbackModule = await import('../src/commands/CleanupUtils.fallback')
const pluginCore = await import('../src/plugins/plugin-core')
function createMockLogger(): ILogger {
@@ -136,15 +135,9 @@ async function main(): Promise {
const iterations = 25
process.stdout.write(`cleanup benchmark iterations=${iterations}\n`)
- const fallbackAvg = await measure('fallback-plan', iterations, async () => {
- await fallbackModule.collectDeletionTargets([plugin], cleanCtx)
- })
- const nativeAvg = await measure('native-plan', iterations, async () => {
+ await measure('native-plan', iterations, async () => {
await cleanupModule.collectDeletionTargets([plugin], cleanCtx)
})
-
- const delta = nativeAvg - fallbackAvg
- process.stdout.write(`delta=${delta.toFixed(2)}ms (${((delta / fallbackAvg) * 100).toFixed(2)}%)\n`)
}
finally {
fs.rmSync(tempDir, {recursive: true, force: true})
diff --git a/cli/scripts/cleanup-native-smoke.ts b/cli/scripts/cleanup-native-smoke.ts
index 9174df3f..1e9c84b6 100644
--- a/cli/scripts/cleanup-native-smoke.ts
+++ b/cli/scripts/cleanup-native-smoke.ts
@@ -9,7 +9,6 @@ delete process.env['VITEST']
delete process.env['VITEST_WORKER_ID']
const cleanupModule = await import('../src/commands/CleanupUtils')
-const fallbackModule = await import('../src/commands/CleanupUtils.fallback')
const pluginCore = await import('../src/plugins/plugin-core')
function createMockLogger(): ILogger {
@@ -106,21 +105,18 @@ async function main(): Promise {
const cleanCtx = createCleanContext(workspaceDir)
const nativePlan = await cleanupModule.collectDeletionTargets([plugin], cleanCtx)
- const fallbackPlan = await fallbackModule.collectDeletionTargets([plugin], cleanCtx)
-
- const sortPaths = (value: {filesToDelete: string[], dirsToDelete: string[], excludedScanGlobs: string[]}) => ({
- ...value,
- filesToDelete: [...value.filesToDelete].sort(),
- dirsToDelete: [...value.dirsToDelete].sort(),
- excludedScanGlobs: [...value.excludedScanGlobs].sort()
- })
-
- if (JSON.stringify(sortPaths(nativePlan)) !== JSON.stringify(sortPaths(fallbackPlan))) {
- throw new Error(`Native cleanup plan mismatch.\nNative: ${JSON.stringify(nativePlan, null, 2)}\nFallback: ${JSON.stringify(fallbackPlan, null, 2)}`)
+ expectSetEqual(nativePlan.filesToDelete, [rootOutput, childOutput], 'native cleanup plan files')
+ expectSetEqual(nativePlan.dirsToDelete, [
+ legacySkillDir,
+ path.join(workspaceDir, 'project-a', 'commands'),
+ path.join(workspaceDir, 'project-a')
+ ], 'native cleanup plan directories')
+ if (nativePlan.violations.length > 0 || nativePlan.conflicts.length > 0) {
+ throw new Error(`Unexpected native cleanup plan: ${JSON.stringify(nativePlan, null, 2)}`)
}
const result = await cleanupModule.performCleanup([plugin], cleanCtx, createMockLogger())
- if (result.deletedFiles !== 2 || result.deletedDirs !== 1 || result.errors.length > 0) {
+ if (result.deletedFiles !== 2 || result.deletedDirs !== 3 || result.errors.length > 0) {
throw new Error(`Unexpected native cleanup result: ${JSON.stringify(result, null, 2)}`)
}
@@ -138,4 +134,12 @@ async function main(): Promise {
}
}
+function expectSetEqual(actual: readonly string[], expected: readonly string[], label: string): void {
+ const actualSorted = [...actual].sort()
+ const expectedSorted = [...expected].sort()
+ if (JSON.stringify(actualSorted) !== JSON.stringify(expectedSorted)) {
+ throw new Error(`Unexpected ${label}: ${JSON.stringify(actualSorted)} !== ${JSON.stringify(expectedSorted)}`)
+ }
+}
+
await main()
diff --git a/cli/src/cleanup/empty-directories.ts b/cli/src/cleanup/empty-directories.ts
new file mode 100644
index 00000000..1491ba9b
--- /dev/null
+++ b/cli/src/cleanup/empty-directories.ts
@@ -0,0 +1,97 @@
+import type * as fs from 'node:fs'
+import {resolveAbsolutePath} from '../ProtectedDeletionGuard'
+
+const EMPTY_DIRECTORY_SCAN_EXCLUDED_BASENAMES = new Set([
+ '.git',
+ 'node_modules',
+ 'dist',
+ 'target',
+ '.next',
+ '.turbo',
+ 'coverage',
+ '.nyc_output',
+ '.cache',
+ '.vite',
+ '.vite-temp',
+ '.pnpm-store',
+ '.yarn',
+ '.idea',
+ '.vscode'
+])
+
+export interface WorkspaceEmptyDirectoryPlan {
+ readonly emptyDirsToDelete: string[]
+}
+
+export interface WorkspaceEmptyDirectoryPlannerOptions {
+ readonly fs: typeof import('node:fs')
+ readonly path: typeof import('node:path')
+ readonly workspaceDir: string
+ readonly filesToDelete: readonly string[]
+ readonly dirsToDelete: readonly string[]
+}
+
+function shouldSkipEmptyDirectoryTree(
+ nodePath: typeof import('node:path'),
+ workspaceDir: string,
+ currentDir: string
+): boolean {
+ if (currentDir === workspaceDir) return false
+ return EMPTY_DIRECTORY_SCAN_EXCLUDED_BASENAMES.has(nodePath.basename(currentDir))
+}
+
+export function planWorkspaceEmptyDirectoryCleanup(
+ options: WorkspaceEmptyDirectoryPlannerOptions
+): WorkspaceEmptyDirectoryPlan {
+ const workspaceDir = resolveAbsolutePath(options.workspaceDir)
+ const filesToDelete = new Set(options.filesToDelete.map(resolveAbsolutePath))
+ const dirsToDelete = new Set(options.dirsToDelete.map(resolveAbsolutePath))
+  const emptyDirsToDelete = new Set<string>()
+
+ const collectEmptyDirectories = (currentDir: string): boolean => {
+ if (dirsToDelete.has(currentDir)) return true
+ if (shouldSkipEmptyDirectoryTree(options.path, workspaceDir, currentDir)) return false
+
+ let entries: fs.Dirent[]
+ try {
+ entries = options.fs.readdirSync(currentDir, {withFileTypes: true})
+ }
+ catch {
+ return false
+ }
+
+ let hasRetainedEntries = false
+
+ for (const entry of entries) {
+ const entryPath = resolveAbsolutePath(options.path.join(currentDir, entry.name))
+
+ if (dirsToDelete.has(entryPath)) continue
+
+ if (entry.isDirectory()) {
+ if (shouldSkipEmptyDirectoryTree(options.path, workspaceDir, entryPath)) {
+ hasRetainedEntries = true
+ continue
+ }
+
+ if (collectEmptyDirectories(entryPath)) {
+ emptyDirsToDelete.add(entryPath)
+ continue
+ }
+
+ hasRetainedEntries = true
+ continue
+ }
+
+ if (filesToDelete.has(entryPath)) continue
+ hasRetainedEntries = true
+ }
+
+ return !hasRetainedEntries
+ }
+
+ collectEmptyDirectories(workspaceDir)
+
+ return {
+ emptyDirsToDelete: [...emptyDirsToDelete].sort((a, b) => a.localeCompare(b))
+ }
+}
diff --git a/cli/src/commands/CleanupUtils.adapter.test.ts b/cli/src/commands/CleanupUtils.adapter.test.ts
index cdb02db3..2976dcca 100644
--- a/cli/src/commands/CleanupUtils.adapter.test.ts
+++ b/cli/src/commands/CleanupUtils.adapter.test.ts
@@ -12,6 +12,7 @@ const nativeBindingMocks = vi.hoisted(() => ({
vi.mock('../core/native-binding', () => ({
getNativeBinding: () => ({
+ ...globalThis.__TNMSC_TEST_NATIVE_BINDING__,
planCleanup: nativeBindingMocks.planCleanup,
performCleanup: nativeBindingMocks.performCleanup
})
@@ -88,18 +89,20 @@ describe('cleanupUtils native adapter', () => {
nativeBindingMocks.planCleanup.mockReturnValue(JSON.stringify({
filesToDelete: ['/tmp/project-a/AGENTS.md'],
dirsToDelete: ['/tmp/.codex/skills/legacy'],
+ emptyDirsToDelete: ['/tmp/.codex/skills'],
violations: [],
conflicts: [],
excludedScanGlobs: ['**/.git/**']
}))
nativeBindingMocks.performCleanup.mockReturnValue(JSON.stringify({
deletedFiles: 1,
- deletedDirs: 1,
+ deletedDirs: 2,
errors: [],
violations: [],
conflicts: [],
filesToDelete: ['/tmp/project-a/AGENTS.md'],
dirsToDelete: ['/tmp/.codex/skills/legacy'],
+ emptyDirsToDelete: ['/tmp/.codex/skills'],
excludedScanGlobs: ['**/.git/**']
}))
@@ -113,7 +116,7 @@ describe('cleanupUtils native adapter', () => {
const plan = await collectDeletionTargets([plugin], cleanCtx)
expect(plan).toEqual({
filesToDelete: ['/tmp/project-a/AGENTS.md'],
- dirsToDelete: ['/tmp/.codex/skills/legacy'],
+ dirsToDelete: ['/tmp/.codex/skills', '/tmp/.codex/skills/legacy'],
violations: [],
conflicts: [],
excludedScanGlobs: ['**/.git/**']
@@ -136,7 +139,7 @@ describe('cleanupUtils native adapter', () => {
const result = await performCleanup([plugin], cleanCtx, createMockLogger())
expect(result).toEqual({
deletedFiles: 1,
- deletedDirs: 1,
+ deletedDirs: 2,
errors: [],
violations: [],
conflicts: []
diff --git a/cli/src/commands/CleanupUtils.fallback.ts b/cli/src/commands/CleanupUtils.fallback.ts
deleted file mode 100644
index 7119baed..00000000
--- a/cli/src/commands/CleanupUtils.fallback.ts
+++ /dev/null
@@ -1,508 +0,0 @@
-import type {DeletionError} from '../core/desk-paths'
-import type {ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputCleanupPathDeclaration, OutputFileDeclaration, OutputPlugin, PluginOptions} from '../plugins/plugin-core'
-import type {ProtectedPathRule, ProtectionMode, ProtectionRuleMatcher} from '../ProtectedDeletionGuard'
-import * as fs from 'node:fs'
-import * as path from 'node:path'
-import glob from 'fast-glob'
-import {
- buildDiagnostic,
- buildFileOperationDiagnostic,
- diagnosticLines
-} from '@/diagnostics'
-import {compactDeletionTargets} from '../cleanup/delete-targets'
-import {deleteTargets as deskDeleteTargets} from '../core/desk-paths'
-import {
- collectAllPluginOutputs
-} from '../plugins/plugin-core'
-import {
- buildComparisonKeys,
- collectConfiguredAindexInputRules,
- collectProjectRoots,
- collectProtectedInputSourceRules,
- createProtectedDeletionGuard,
- logProtectedDeletionGuardError,
- partitionDeletionTargets,
- resolveAbsolutePath
-} from '../ProtectedDeletionGuard'
-
-/**
- * Result of cleanup operation
- */
-export interface CleanupResult {
- readonly deletedFiles: number
- readonly deletedDirs: number
- readonly errors: readonly CleanupError[]
- readonly violations: readonly import('../ProtectedDeletionGuard').ProtectedPathViolation[]
- readonly conflicts: readonly CleanupProtectionConflict[]
- readonly message?: string
-}
-
-/**
- * Error during cleanup operation
- */
-export interface CleanupError {
- readonly path: string
- readonly type: 'file' | 'directory'
- readonly error: unknown
-}
-
-export interface CleanupProtectionConflict {
- readonly outputPath: string
- readonly outputPlugin: string
- readonly protectedPath: string
- readonly protectionMode: ProtectionMode
- readonly protectedBy: string
- readonly reason: string
-}
-
-export class CleanupProtectionConflictError extends Error {
- readonly conflicts: readonly CleanupProtectionConflict[]
-
- constructor(conflicts: readonly CleanupProtectionConflict[]) {
- super(buildCleanupProtectionConflictMessage(conflicts))
- this.name = 'CleanupProtectionConflictError'
- this.conflicts = conflicts
- }
-}
-
-interface CleanupTargetCollections {
- readonly filesToDelete: string[]
- readonly dirsToDelete: string[]
- readonly violations: readonly import('../ProtectedDeletionGuard').ProtectedPathViolation[]
- readonly conflicts: readonly CleanupProtectionConflict[]
- readonly excludedScanGlobs: string[]
-}
-
-const DEFAULT_CLEANUP_SCAN_EXCLUDE_GLOBS = [
- '**/node_modules/**',
- '**/.git/**',
- '**/.turbo/**',
- '**/.pnpm-store/**',
- '**/.yarn/**',
- '**/.next/**'
-] as const
-
-function normalizeGlobPattern(pattern: string): string {
- return resolveAbsolutePath(pattern).replaceAll('\\', '/')
-}
-
-function expandCleanupGlob(
- pattern: string,
- ignoreGlobs: readonly string[]
-): readonly string[] {
- const normalizedPattern = normalizeGlobPattern(pattern)
- return glob.sync(normalizedPattern, {
- onlyFiles: false,
- dot: true,
- absolute: true,
- followSymbolicLinks: false,
- ignore: [...ignoreGlobs]
- })
-}
-
-function shouldExcludeCleanupMatch(
- matchedPath: string,
- target: OutputCleanupPathDeclaration
-): boolean {
- if (target.excludeBasenames == null || target.excludeBasenames.length === 0) return false
- const basename = path.basename(matchedPath)
- return target.excludeBasenames.includes(basename)
-}
-
-async function collectPluginCleanupDeclarations(
- plugin: OutputPlugin,
- cleanCtx: OutputCleanContext
-): Promise<OutputCleanupDeclarations> {
- if (plugin.declareCleanupPaths == null) return {}
- return plugin.declareCleanupPaths({...cleanCtx, dryRun: true})
-}
-
-async function collectPluginCleanupSnapshot(
- plugin: OutputPlugin,
- cleanCtx: OutputCleanContext,
-  predeclaredOutputs?: ReadonlyMap<OutputPlugin, readonly OutputFileDeclaration[]>
-): Promise<{
- readonly plugin: OutputPlugin
-  readonly outputs: Awaited<ReturnType<OutputPlugin['declareOutputFiles']>>
- readonly cleanup: OutputCleanupDeclarations
-}> {
- const existingOutputDeclarations = predeclaredOutputs?.get(plugin)
- const [outputs, cleanup] = await Promise.all([
- existingOutputDeclarations != null
- ? Promise.resolve(existingOutputDeclarations)
- : plugin.declareOutputFiles({...cleanCtx, dryRun: true}),
- collectPluginCleanupDeclarations(plugin, cleanCtx)
- ])
-
- return {plugin, outputs, cleanup}
-}
-
-function buildCleanupProtectionConflictMessage(conflicts: readonly CleanupProtectionConflict[]): string {
- const pathList = conflicts.map(conflict => conflict.outputPath).join(', ')
- return `Cleanup protection conflict: ${conflicts.length} output path(s) are also protected: ${pathList}`
-}
-
-function detectCleanupProtectionConflicts(
- outputPathOwners: ReadonlyMap,
-  guard: ReturnType<typeof createProtectedDeletionGuard>
-): CleanupProtectionConflict[] {
- const conflicts: CleanupProtectionConflict[] = []
-
- for (const [outputPath, outputPlugins] of outputPathOwners.entries()) {
- const outputKeys = new Set(buildComparisonKeys(outputPath))
-
- for (const rule of guard.compiledRules) {
- const isExactMatch = rule.comparisonKeys.some(ruleKey => outputKeys.has(ruleKey))
- if (!isExactMatch) continue
-
- for (const outputPlugin of outputPlugins) {
- conflicts.push({
- outputPath,
- outputPlugin,
- protectedPath: rule.path,
- protectionMode: rule.protectionMode,
- protectedBy: rule.source,
- reason: rule.reason
- })
- }
- }
- }
-
- return conflicts.sort((a, b) => {
- const pathDiff = a.outputPath.localeCompare(b.outputPath)
- if (pathDiff !== 0) return pathDiff
- return a.protectedPath.localeCompare(b.protectedPath)
- })
-}
-
-function logCleanupProtectionConflicts(
- logger: ILogger,
- conflicts: readonly CleanupProtectionConflict[]
-): void {
- const firstConflict = conflicts[0]
-
- logger.error(buildDiagnostic({
- code: 'CLEANUP_PROTECTION_CONFLICT_DETECTED',
- title: 'Cleanup output paths conflict with protected inputs',
- rootCause: diagnosticLines(
- `tnmsc found ${conflicts.length} output path(s) that also match protected cleanup rules.`,
- firstConflict == null
- ? 'No conflict details were captured.'
- : `Example conflict: "${firstConflict.outputPath}" is protected by "${firstConflict.protectedPath}".`
- ),
- exactFix: diagnosticLines(
- 'Separate generated output paths from protected source or reserved workspace paths before running cleanup again.'
- ),
- possibleFixes: [
- diagnosticLines('Update cleanup protect declarations so they do not overlap generated outputs.'),
- diagnosticLines('Move the conflicting output target to a generated-only directory.')
- ],
- details: {
- count: conflicts.length,
- conflicts: conflicts.map(conflict => ({
- outputPath: conflict.outputPath,
- outputPlugin: conflict.outputPlugin,
- protectedPath: conflict.protectedPath,
- protectionMode: conflict.protectionMode,
- protectedBy: conflict.protectedBy,
- reason: conflict.reason
- }))
- }
- }))
-}
-
-/**
- * Collect deletion targets from enabled output plugins.
- */
-export async function collectDeletionTargets(
- outputPlugins: readonly OutputPlugin[],
- cleanCtx: OutputCleanContext,
-  predeclaredOutputs?: ReadonlyMap<OutputPlugin, readonly OutputFileDeclaration[]>
-): Promise<{
- filesToDelete: string[]
- dirsToDelete: string[]
- violations: import('../ProtectedDeletionGuard').ProtectedPathViolation[]
- conflicts: CleanupProtectionConflict[]
- excludedScanGlobs: string[]
-}> {
-  const deleteFiles = new Set<string>()
-  const deleteDirs = new Set<string>()
-  const protectedRules = new Map<string, ProtectedPathRule>()
-  const excludeScanGlobSet = new Set<string>(DEFAULT_CLEANUP_SCAN_EXCLUDE_GLOBS)
-  const outputPathOwners = new Map<string, string[]>()
-
- const pluginSnapshots = await Promise.all(
- outputPlugins.map(async plugin => collectPluginCleanupSnapshot(plugin, cleanCtx, predeclaredOutputs))
- )
-
- const addDeletePath = (rawPath: string, kind: 'file' | 'directory'): void => {
- if (kind === 'directory') deleteDirs.add(resolveAbsolutePath(rawPath))
- else deleteFiles.add(resolveAbsolutePath(rawPath))
- }
-
- const addProtectRule = (
- rawPath: string,
- protectionMode: ProtectionMode,
- reason: string,
- source: string,
- matcher: ProtectionRuleMatcher = 'path'
- ): void => {
- const resolvedPath = resolveAbsolutePath(rawPath)
- protectedRules.set(`${matcher}:${protectionMode}:${resolvedPath}`, {
- path: resolvedPath,
- protectionMode,
- reason,
- source,
- matcher
- })
- }
-
- const defaultProtectionModeForTarget = (target: OutputCleanupPathDeclaration): ProtectionMode => {
- if (target.protectionMode != null) return target.protectionMode
- return target.kind === 'file' ? 'direct' : 'recursive'
- }
-
- for (const rule of collectProtectedInputSourceRules(cleanCtx.collectedOutputContext)) addProtectRule(rule.path, rule.protectionMode, rule.reason, rule.source)
- if (cleanCtx.collectedOutputContext.aindexDir != null && cleanCtx.pluginOptions != null) {
-    for (const rule of collectConfiguredAindexInputRules(cleanCtx.pluginOptions as Required<PluginOptions>, cleanCtx.collectedOutputContext.aindexDir, {
- workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path
- })) {
- addProtectRule(rule.path, rule.protectionMode, rule.reason, rule.source, rule.matcher)
- }
- }
-
- for (const rule of cleanCtx.pluginOptions?.cleanupProtection?.rules ?? []) {
- addProtectRule(
- rule.path,
- rule.protectionMode,
- rule.reason ?? 'configured cleanup protection rule',
- 'configured-cleanup-protection',
- rule.matcher ?? 'path'
- )
- }
-
- for (const snapshot of pluginSnapshots) {
- for (const declaration of snapshot.outputs) {
- const resolvedOutputPath = resolveAbsolutePath(declaration.path)
- addDeletePath(resolvedOutputPath, 'file')
- const existingOwners = outputPathOwners.get(resolvedOutputPath)
- if (existingOwners == null) outputPathOwners.set(resolvedOutputPath, [snapshot.plugin.name])
- else if (!existingOwners.includes(snapshot.plugin.name)) existingOwners.push(snapshot.plugin.name)
- }
- for (const ignoreGlob of snapshot.cleanup.excludeScanGlobs ?? []) excludeScanGlobSet.add(normalizeGlobPattern(ignoreGlob))
- }
-
- const excludeScanGlobs = [...excludeScanGlobSet]
-
- const resolveDeleteGlob = (target: OutputCleanupPathDeclaration): void => {
- for (const matchedPath of expandCleanupGlob(target.path, excludeScanGlobs)) {
- if (shouldExcludeCleanupMatch(matchedPath, target)) continue
-
- try {
- const stat = fs.lstatSync(matchedPath)
- if (stat.isDirectory()) addDeletePath(matchedPath, 'directory')
- else addDeletePath(matchedPath, 'file')
- }
- catch {}
- }
- }
-
- const resolveProtectGlob = (target: OutputCleanupPathDeclaration, pluginName: string): void => {
- const protectionMode = defaultProtectionModeForTarget(target)
- const reason = target.label != null
- ? `plugin cleanup protect declaration (${target.label})`
- : 'plugin cleanup protect declaration'
-
- for (const matchedPath of expandCleanupGlob(target.path, excludeScanGlobs)) {
- addProtectRule(matchedPath, protectionMode, reason, `plugin-cleanup-protect:${pluginName}`)
- }
- }
-
- for (const {plugin, cleanup} of pluginSnapshots) {
- for (const target of cleanup.protect ?? []) {
- if (target.kind === 'glob') {
- resolveProtectGlob(target, plugin.name)
- continue
- }
- addProtectRule(
- target.path,
- defaultProtectionModeForTarget(target),
- target.label != null ? `plugin cleanup protect declaration (${target.label})` : 'plugin cleanup protect declaration',
- `plugin-cleanup-protect:${plugin.name}`
- )
- }
-
- for (const target of cleanup.delete ?? []) {
- if (target.kind === 'glob') {
- resolveDeleteGlob(target)
- continue
- }
- if (target.kind === 'directory') addDeletePath(target.path, 'directory')
- else addDeletePath(target.path, 'file')
- }
- }
-
- const guard = createProtectedDeletionGuard({
- workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path,
- projectRoots: collectProjectRoots(cleanCtx.collectedOutputContext),
- rules: [...protectedRules.values()],
- ...cleanCtx.collectedOutputContext.aindexDir != null
- ? {aindexDir: cleanCtx.collectedOutputContext.aindexDir}
- : {}
- })
- const conflicts = detectCleanupProtectionConflicts(outputPathOwners, guard)
- if (conflicts.length > 0) throw new CleanupProtectionConflictError(conflicts)
- const filePartition = partitionDeletionTargets([...deleteFiles], guard)
- const dirPartition = partitionDeletionTargets([...deleteDirs], guard)
-
- const compactedTargets = compactDeletionTargets(
- filePartition.safePaths,
- dirPartition.safePaths
- )
-
- return {
- filesToDelete: compactedTargets.files,
- dirsToDelete: compactedTargets.dirs,
- violations: [...filePartition.violations, ...dirPartition.violations].sort((a, b) => a.targetPath.localeCompare(b.targetPath)),
- conflicts: [],
- excludedScanGlobs: [...excludeScanGlobSet].sort((a, b) => a.localeCompare(b))
- }
-}
-
-function buildCleanupErrors(
- logger: ILogger,
- errors: readonly DeletionError[],
- type: 'file' | 'directory'
-): CleanupError[] {
- return errors.map(currentError => {
- const errorMessage = currentError.error instanceof Error ? currentError.error.message : String(currentError.error)
- logger.warn(buildFileOperationDiagnostic({
- code: type === 'file' ? 'CLEANUP_FILE_DELETE_FAILED' : 'CLEANUP_DIRECTORY_DELETE_FAILED',
- title: type === 'file' ? 'Cleanup could not delete a file' : 'Cleanup could not delete a directory',
- operation: 'delete',
- targetKind: type,
- path: currentError.path,
- error: errorMessage,
- details: {
- phase: 'cleanup'
- }
- }))
-
- return {path: currentError.path, type, error: currentError.error}
- })
-}
-
-async function executeCleanupTargets(
- targets: CleanupTargetCollections,
- logger: ILogger
-): Promise<{deletedFiles: number, deletedDirs: number, errors: CleanupError[]}> {
- logger.debug('cleanup delete execution started', {
- filesToDelete: targets.filesToDelete.length,
- dirsToDelete: targets.dirsToDelete.length
- })
-
- const result = await deskDeleteTargets({
- files: targets.filesToDelete,
- dirs: targets.dirsToDelete
- })
-
- const fileErrors = buildCleanupErrors(logger, result.fileErrors, 'file')
- const dirErrors = buildCleanupErrors(logger, result.dirErrors, 'directory')
- const allErrors = [...fileErrors, ...dirErrors]
-
- logger.debug('cleanup delete execution complete', {
- deletedFiles: result.deletedFiles.length,
- deletedDirs: result.deletedDirs.length,
- errors: allErrors.length
- })
-
- return {
- deletedFiles: result.deletedFiles.length,
- deletedDirs: result.deletedDirs.length,
- errors: allErrors
- }
-}
-
-function logCleanupPlanDiagnostics(
- logger: ILogger,
- targets: CleanupTargetCollections
-): void {
- logger.debug('cleanup plan built', {
- filesToDelete: targets.filesToDelete.length,
- dirsToDelete: targets.dirsToDelete.length,
- violations: targets.violations.length,
- conflicts: targets.conflicts.length,
- excludedScanGlobs: targets.excludedScanGlobs
- })
-}
-
-/**
- * Perform cleanup operation for output plugins.
- * This is the main reusable cleanup function that can be called from both
- * CleanCommand and ExecuteCommand (for pre-cleanup).
- */
-export async function performCleanup(
- outputPlugins: readonly OutputPlugin[],
- cleanCtx: OutputCleanContext,
- logger: ILogger,
- predeclaredOutputs?: ReadonlyMap<OutputPlugin, readonly OutputFileDeclaration[]>
-): Promise<CleanupResult> {
- if (predeclaredOutputs != null) {
- const outputs = await collectAllPluginOutputs(outputPlugins, cleanCtx, predeclaredOutputs)
- logger.debug('Collected outputs for cleanup', {
- projectDirs: outputs.projectDirs.length,
- projectFiles: outputs.projectFiles.length,
- globalDirs: outputs.globalDirs.length,
- globalFiles: outputs.globalFiles.length
- })
- }
-
- let targets: Awaited<ReturnType<typeof collectDeletionTargets>>
- try {
- targets = await collectDeletionTargets(outputPlugins, cleanCtx, predeclaredOutputs)
- }
- catch (error) {
- if (error instanceof CleanupProtectionConflictError) {
- logCleanupProtectionConflicts(logger, error.conflicts)
- return {
- deletedFiles: 0,
- deletedDirs: 0,
- errors: [],
- violations: [],
- conflicts: error.conflicts,
- message: error.message
- }
- }
- throw error
- }
- const cleanupTargets: CleanupTargetCollections = {
- filesToDelete: targets.filesToDelete,
- dirsToDelete: targets.dirsToDelete,
- violations: targets.violations,
- conflicts: targets.conflicts,
- excludedScanGlobs: targets.excludedScanGlobs
- }
- logCleanupPlanDiagnostics(logger, cleanupTargets)
-
- if (cleanupTargets.violations.length > 0) {
- logProtectedDeletionGuardError(logger, 'cleanup', cleanupTargets.violations)
- return {
- deletedFiles: 0,
- deletedDirs: 0,
- errors: [],
- violations: cleanupTargets.violations,
- conflicts: [],
- message: `Protected deletion guard blocked cleanup for ${cleanupTargets.violations.length} path(s)`
- }
- }
-
- const executionResult = await executeCleanupTargets(cleanupTargets, logger)
-
- return {
- deletedFiles: executionResult.deletedFiles,
- deletedDirs: executionResult.deletedDirs,
- errors: executionResult.errors,
- violations: [],
- conflicts: []
- }
-}
diff --git a/cli/src/commands/CleanupUtils.test.ts b/cli/src/commands/CleanupUtils.test.ts
index fb6919e2..5f26e97a 100644
--- a/cli/src/commands/CleanupUtils.test.ts
+++ b/cli/src/commands/CleanupUtils.test.ts
@@ -43,6 +43,7 @@ function createCleanContext(
overrides?: Partial,
pluginOptionsOverrides?: Parameters[0]
): OutputCleanContext {
+ const workspaceDir = path.resolve('tmp-cleanup-utils-workspace')
return {
logger: createMockLogger(),
fs,
@@ -53,11 +54,10 @@ function createCleanContext(
collectedOutputContext: {
workspace: {
directory: {
- pathKind: FilePathKind.Relative,
- path: '.',
- basePath: '.',
- getDirectoryName: () => '.',
- getAbsolutePath: () => path.resolve('.')
+ pathKind: FilePathKind.Absolute,
+ path: workspaceDir,
+ getDirectoryName: () => path.basename(workspaceDir),
+ getAbsolutePath: () => workspaceDir
},
projects: []
},
@@ -200,7 +200,7 @@ describe('collectDeletionTargets', () => {
expect(result.filesToDelete).toEqual([])
expect(result.violations).toEqual([expect.objectContaining({
targetPath: path.resolve(homeDir),
- protectedPath: path.resolve('knowladge'),
+ protectedPath: path.resolve('tmp-cleanup-utils-workspace', 'knowladge'),
protectionMode: 'direct'
})])
})
@@ -302,7 +302,11 @@ describe('collectDeletionTargets', () => {
path.resolve(projectChildFile),
path.resolve(safeDistMarkdownFile)
]))
- expect(new Set(result.dirsToDelete)).toEqual(new Set([path.resolve(globalChildDir)]))
+ expect(new Set(result.dirsToDelete)).toEqual(new Set([
+ path.resolve(globalChildDir),
+ path.resolve(aindexSourceDir),
+ path.resolve(workspaceDir, 'project-a')
+ ]))
expect(result.violations).toEqual(expect.arrayContaining([
expect.objectContaining({
targetPath: path.resolve(protectedDistMdxFile),
@@ -600,6 +604,62 @@ describe('collectDeletionTargets', () => {
fs.rmSync(tempDir, {recursive: true, force: true})
}
})
+
+ it('plans workspace empty directories while skipping excluded trees and symlink entries', async () => {
+ const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-cleanup-empty-sweep-'))
+ const workspaceDir = path.join(tempDir, 'workspace')
+ const sourceLeafDir = path.join(workspaceDir, 'source', 'empty', 'leaf')
+ const sourceKeepFile = path.join(workspaceDir, 'source', 'keep.md')
+ const distEmptyDir = path.join(workspaceDir, 'dist', 'ghost')
+ const nodeModulesEmptyDir = path.join(workspaceDir, 'node_modules', 'pkg', 'ghost')
+ const gitEmptyDir = path.join(workspaceDir, '.git', 'objects', 'info')
+ const symlinkTarget = path.join(tempDir, 'symlink-target')
+ const symlinkParentDir = path.join(workspaceDir, 'symlink-parent')
+ const symlinkPath = path.join(symlinkParentDir, 'linked')
+
+ fs.mkdirSync(sourceLeafDir, {recursive: true})
+ fs.mkdirSync(path.dirname(sourceKeepFile), {recursive: true})
+ fs.mkdirSync(distEmptyDir, {recursive: true})
+ fs.mkdirSync(nodeModulesEmptyDir, {recursive: true})
+ fs.mkdirSync(gitEmptyDir, {recursive: true})
+ fs.mkdirSync(symlinkTarget, {recursive: true})
+ fs.mkdirSync(symlinkParentDir, {recursive: true})
+ fs.writeFileSync(sourceKeepFile, '# keep', 'utf8')
+
+ try {
+ const symlinkType: 'junction' | 'dir' = process.platform === 'win32' ? 'junction' : 'dir'
+ fs.symlinkSync(symlinkTarget, symlinkPath, symlinkType)
+
+ const ctx = createCleanContext({
+ workspace: {
+ directory: {
+ pathKind: FilePathKind.Absolute,
+ path: workspaceDir,
+ getDirectoryName: () => path.basename(workspaceDir),
+ getAbsolutePath: () => workspaceDir
+ },
+ projects: []
+ }
+ })
+ const plugin = createMockOutputPlugin('MockOutputPlugin', [])
+
+ const result = await collectDeletionTargets([plugin], ctx)
+
+ expect(result.filesToDelete).toEqual([])
+ expect(result.dirsToDelete).toEqual([
+ path.resolve(workspaceDir, 'source', 'empty'),
+ path.resolve(sourceLeafDir)
+ ])
+ expect(result.dirsToDelete).not.toContain(path.resolve(workspaceDir))
+ expect(result.dirsToDelete).not.toContain(path.resolve(distEmptyDir))
+ expect(result.dirsToDelete).not.toContain(path.resolve(nodeModulesEmptyDir))
+ expect(result.dirsToDelete).not.toContain(path.resolve(gitEmptyDir))
+ expect(result.dirsToDelete).not.toContain(path.resolve(symlinkParentDir))
+ }
+ finally {
+ fs.rmSync(tempDir, {recursive: true, force: true})
+ }
+ })
})
describe('performCleanup', () => {
@@ -634,13 +694,15 @@ describe('performCleanup', () => {
expect(result).toEqual(expect.objectContaining({
deletedFiles: 1,
- deletedDirs: 1,
+ deletedDirs: 3,
errors: [],
violations: [],
conflicts: []
}))
expect(fs.existsSync(outputFile)).toBe(false)
expect(fs.existsSync(outputDir)).toBe(false)
+ expect(fs.existsSync(path.dirname(outputFile))).toBe(false)
+ expect(fs.existsSync(path.dirname(outputDir))).toBe(false)
}
finally {
fs.rmSync(tempDir, {recursive: true, force: true})
@@ -689,4 +751,50 @@ describe('performCleanup', () => {
fs.rmSync(tempDir, {recursive: true, force: true})
}
})
+
+ it('deletes generated files and then prunes workspace empty directories', async () => {
+ const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-perform-cleanup-empty-sweep-'))
+ const outputFile = path.join(tempDir, 'generated', 'AGENTS.md')
+ const emptyLeafDir = path.join(tempDir, 'scratch', 'empty', 'leaf')
+ const retainedScratchFile = path.join(tempDir, 'scratch', 'keep.md')
+
+ fs.mkdirSync(path.dirname(outputFile), {recursive: true})
+ fs.mkdirSync(emptyLeafDir, {recursive: true})
+ fs.mkdirSync(path.dirname(retainedScratchFile), {recursive: true})
+ fs.writeFileSync(outputFile, '# agent', 'utf8')
+ fs.writeFileSync(retainedScratchFile, '# keep', 'utf8')
+
+ try {
+ const ctx = createCleanContext({
+ workspace: {
+ directory: {
+ pathKind: FilePathKind.Absolute,
+ path: tempDir,
+ getDirectoryName: () => path.basename(tempDir),
+ getAbsolutePath: () => tempDir
+ },
+ projects: []
+ }
+ })
+ const plugin = createMockOutputPlugin('MockOutputPlugin', [outputFile])
+
+ const result = await performCleanup([plugin], ctx, createMockLogger())
+
+ expect(result).toEqual(expect.objectContaining({
+ deletedFiles: 1,
+ deletedDirs: 3,
+ errors: [],
+ violations: [],
+ conflicts: []
+ }))
+ expect(fs.existsSync(outputFile)).toBe(false)
+ expect(fs.existsSync(path.dirname(outputFile))).toBe(false)
+ expect(fs.existsSync(path.join(tempDir, 'scratch', 'empty', 'leaf'))).toBe(false)
+ expect(fs.existsSync(path.join(tempDir, 'scratch', 'empty'))).toBe(false)
+ expect(fs.existsSync(path.join(tempDir, 'scratch'))).toBe(true)
+ }
+ finally {
+ fs.rmSync(tempDir, {recursive: true, force: true})
+ }
+ })
})
diff --git a/cli/src/commands/CleanupUtils.ts b/cli/src/commands/CleanupUtils.ts
index 78144a8e..5edd3651 100644
--- a/cli/src/commands/CleanupUtils.ts
+++ b/cli/src/commands/CleanupUtils.ts
@@ -1,10 +1,14 @@
-import type {ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputCleanupPathDeclaration, OutputFileDeclaration, OutputPlugin, PluginOptions} from '../plugins/plugin-core'
+import type {
+ ILogger,
+ OutputCleanContext,
+ OutputCleanupDeclarations,
+ OutputCleanupPathDeclaration,
+ OutputFileDeclaration,
+ OutputPlugin,
+ PluginOptions
+} from '../plugins/plugin-core'
import type {ProtectionMode, ProtectionRuleMatcher} from '../ProtectedDeletionGuard'
-import {
- buildDiagnostic,
- buildFileOperationDiagnostic,
- diagnosticLines
-} from '@/diagnostics'
+import {buildDiagnostic, buildFileOperationDiagnostic, diagnosticLines} from '@/diagnostics'
import {getNativeBinding} from '../core/native-binding'
import {collectAllPluginOutputs} from '../plugins/plugin-core'
import {
@@ -13,20 +17,42 @@ import {
collectProtectedInputSourceRules,
logProtectedDeletionGuardError
} from '../ProtectedDeletionGuard'
-import {
- CleanupProtectionConflictError,
- collectDeletionTargets as collectDeletionTargetsFallback,
- performCleanup as performCleanupFallback
-} from './CleanupUtils.fallback'
-
-export type {
- CleanupError,
- CleanupProtectionConflict,
- CleanupResult
-} from './CleanupUtils.fallback'
-export {
- CleanupProtectionConflictError
-} from './CleanupUtils.fallback'
+
+let nativeCleanupBindingCheck: boolean | null = null
+
+export interface CleanupResult {
+ readonly deletedFiles: number
+ readonly deletedDirs: number
+ readonly errors: readonly CleanupError[]
+ readonly violations: readonly import('../ProtectedDeletionGuard').ProtectedPathViolation[]
+ readonly conflicts: readonly CleanupProtectionConflict[]
+ readonly message?: string
+}
+
+export interface CleanupError {
+ readonly path: string
+ readonly type: 'file' | 'directory'
+ readonly error: unknown
+}
+
+export interface CleanupProtectionConflict {
+ readonly outputPath: string
+ readonly outputPlugin: string
+ readonly protectedPath: string
+ readonly protectionMode: ProtectionMode
+ readonly protectedBy: string
+ readonly reason: string
+}
+
+export class CleanupProtectionConflictError extends Error {
+ readonly conflicts: readonly CleanupProtectionConflict[]
+
+ constructor(conflicts: readonly CleanupProtectionConflict[]) {
+ super(buildCleanupProtectionConflictMessage(conflicts))
+ this.name = 'CleanupProtectionConflictError'
+ this.conflicts = conflicts
+ }
+}
interface NativeCleanupBinding {
readonly planCleanup?: (snapshotJson: string) => string | Promise<string>
@@ -95,6 +121,7 @@ interface NativeCleanupProtectionConflict {
interface NativeCleanupPlan {
readonly filesToDelete: string[]
readonly dirsToDelete: string[]
+ readonly emptyDirsToDelete: string[]
readonly violations: readonly NativeProtectedPathViolation[]
readonly conflicts: readonly NativeCleanupProtectionConflict[]
readonly excludedScanGlobs: string[]
@@ -114,22 +141,32 @@ interface NativeCleanupResult {
readonly conflicts: readonly NativeCleanupProtectionConflict[]
readonly filesToDelete: string[]
readonly dirsToDelete: string[]
+ readonly emptyDirsToDelete: string[]
readonly excludedScanGlobs: string[]
}
-const nativeBinding = getNativeBinding()
-
export function hasNativeCleanupBinding(): boolean {
- return nativeBinding?.planCleanup != null && nativeBinding.performCleanup != null
+ if (nativeCleanupBindingCheck !== null) {
+ return nativeCleanupBindingCheck
+ }
+ const nativeBinding = getNativeBinding()
+ nativeCleanupBindingCheck = nativeBinding?.planCleanup != null && nativeBinding.performCleanup != null
+ return nativeCleanupBindingCheck
+}
+
+function requireNativeCleanupBinding(): NativeCleanupBinding {
+ const nativeBinding = getNativeBinding()
+ if (nativeBinding == null) {
+ throw new Error('Native cleanup binding is required. Build or install the Rust NAPI package before running tnmsc.')
+ }
+ return nativeBinding
}
function mapProtectionMode(mode: ProtectionMode): NativeProtectionMode {
return mode
}
-function mapProtectionRuleMatcher(
- matcher: ProtectionRuleMatcher | undefined
-): NativeProtectionRuleMatcher | undefined {
+function mapProtectionRuleMatcher(matcher: ProtectionRuleMatcher | undefined): NativeProtectionRuleMatcher | undefined {
return matcher
}
@@ -137,19 +174,14 @@ function mapCleanupTarget(target: OutputCleanupPathDeclaration): NativeCleanupTa
return {
path: target.path,
kind: target.kind,
- ...target.excludeBasenames != null && target.excludeBasenames.length > 0
- ? {excludeBasenames: [...target.excludeBasenames]}
- : {},
+ ...target.excludeBasenames != null && target.excludeBasenames.length > 0 ? {excludeBasenames: [...target.excludeBasenames]} : {},
...target.protectionMode != null ? {protectionMode: mapProtectionMode(target.protectionMode)} : {},
...target.scope != null ? {scope: target.scope} : {},
...target.label != null ? {label: target.label} : {}
}
}
-async function collectPluginCleanupDeclarations(
- plugin: OutputPlugin,
- cleanCtx: OutputCleanContext
-): Promise<OutputCleanupDeclarations> {
+async function collectPluginCleanupDeclarations(plugin: OutputPlugin, cleanCtx: OutputCleanContext): Promise<OutputCleanupDeclarations> {
if (plugin.declareCleanupPaths == null) return {}
return plugin.declareCleanupPaths({...cleanCtx, dryRun: true})
}
@@ -161,9 +193,7 @@ async function collectPluginCleanupSnapshot(
): Promise {
const existingOutputDeclarations = predeclaredOutputs?.get(plugin)
const [outputs, cleanup] = await Promise.all([
- existingOutputDeclarations != null
- ? Promise.resolve(existingOutputDeclarations)
- : plugin.declareOutputFiles({...cleanCtx, dryRun: true}),
+ existingOutputDeclarations != null ? Promise.resolve(existingOutputDeclarations) : plugin.declareOutputFiles({...cleanCtx, dryRun: true}),
collectPluginCleanupDeclarations(plugin, cleanCtx)
])
@@ -171,22 +201,14 @@ async function collectPluginCleanupSnapshot(
pluginName: plugin.name,
outputs: outputs.map(output => output.path),
cleanup: {
- ...cleanup.delete != null && cleanup.delete.length > 0
- ? {delete: cleanup.delete.map(mapCleanupTarget)}
- : {},
- ...cleanup.protect != null && cleanup.protect.length > 0
- ? {protect: cleanup.protect.map(mapCleanupTarget)}
- : {},
- ...cleanup.excludeScanGlobs != null && cleanup.excludeScanGlobs.length > 0
- ? {excludeScanGlobs: [...cleanup.excludeScanGlobs]}
- : {}
+ ...cleanup.delete != null && cleanup.delete.length > 0 ? {delete: cleanup.delete.map(mapCleanupTarget)} : {},
+ ...cleanup.protect != null && cleanup.protect.length > 0 ? {protect: cleanup.protect.map(mapCleanupTarget)} : {},
+ ...cleanup.excludeScanGlobs != null && cleanup.excludeScanGlobs.length > 0 ? {excludeScanGlobs: [...cleanup.excludeScanGlobs]} : {}
}
}
}
-function collectConfiguredCleanupProtectionRules(
- cleanCtx: OutputCleanContext
-): NativeProtectedRule[] {
+function collectConfiguredCleanupProtectionRules(cleanCtx: OutputCleanContext): NativeProtectedRule[] {
return (cleanCtx.pluginOptions?.cleanupProtection?.rules ?? []).map(rule => ({
path: rule.path,
protectionMode: mapProtectionMode(rule.protectionMode),
@@ -196,49 +218,48 @@ function collectConfiguredCleanupProtectionRules(
}))
}
-function buildCleanupProtectionConflictMessage(
- conflicts: readonly NativeCleanupProtectionConflict[]
-): string {
+function buildCleanupProtectionConflictMessage(conflicts: readonly NativeCleanupProtectionConflict[]): string {
const pathList = conflicts.map(conflict => conflict.outputPath).join(', ')
return `Cleanup protection conflict: ${conflicts.length} output path(s) are also protected: ${pathList}`
}
-function logCleanupProtectionConflicts(
- logger: ILogger,
- conflicts: readonly NativeCleanupProtectionConflict[]
-): void {
+function logCleanupProtectionConflicts(logger: ILogger, conflicts: readonly NativeCleanupProtectionConflict[]): void {
const firstConflict = conflicts[0]
- logger.error(buildDiagnostic({
- code: 'CLEANUP_PROTECTION_CONFLICT_DETECTED',
- title: 'Cleanup output paths conflict with protected inputs',
- rootCause: diagnosticLines(
- `tnmsc found ${conflicts.length} output path(s) that also match protected cleanup rules.`,
- firstConflict == null
- ? 'No conflict details were captured.'
- : `Example conflict: "${firstConflict.outputPath}" is protected by "${firstConflict.protectedPath}".`
- ),
- exactFix: diagnosticLines(
- 'Separate generated output paths from protected source or reserved workspace paths before running cleanup again.'
- ),
- possibleFixes: [
- diagnosticLines('Update cleanup protect declarations so they do not overlap generated outputs.'),
- diagnosticLines('Move the conflicting output target to a generated-only directory.')
- ],
- details: {
- count: conflicts.length,
- conflicts
- }
- }))
+ logger.error(
+ buildDiagnostic({
+ code: 'CLEANUP_PROTECTION_CONFLICT_DETECTED',
+ title: 'Cleanup output paths conflict with protected inputs',
+ rootCause: diagnosticLines(
+ `tnmsc found ${conflicts.length} output path(s) that also match protected cleanup rules.`,
+ firstConflict == null
+ ? 'No conflict details were captured.'
+ : `Example conflict: "${firstConflict.outputPath}" is protected by "${firstConflict.protectedPath}".`
+ ),
+ exactFix: diagnosticLines('Separate generated output paths from protected source or reserved workspace paths before running cleanup again.'),
+ possibleFixes: [
+ diagnosticLines('Update cleanup protect declarations so they do not overlap generated outputs.'),
+ diagnosticLines('Move the conflicting output target to a generated-only directory.')
+ ],
+ details: {
+ count: conflicts.length,
+ conflicts
+ }
+ })
+ )
}
function logCleanupPlanDiagnostics(
logger: ILogger,
- plan: Pick<NativeCleanupPlan | NativeCleanupResult, 'filesToDelete' | 'dirsToDelete' | 'violations' | 'conflicts' | 'excludedScanGlobs'>
+ plan: Pick<
+ NativeCleanupPlan | NativeCleanupResult,
+ 'filesToDelete' | 'dirsToDelete' | 'emptyDirsToDelete' | 'violations' | 'conflicts' | 'excludedScanGlobs'
+ >
): void {
logger.debug('cleanup plan built', {
filesToDelete: plan.filesToDelete.length,
- dirsToDelete: plan.dirsToDelete.length,
+ dirsToDelete: plan.dirsToDelete.length + plan.emptyDirsToDelete.length,
+ emptyDirsToDelete: plan.emptyDirsToDelete.length,
violations: plan.violations.length,
conflicts: plan.conflicts.length,
excludedScanGlobs: plan.excludedScanGlobs
@@ -251,17 +272,19 @@ function logNativeCleanupErrors(
): readonly {path: string, type: 'file' | 'directory', error: string}[] {
return errors.map(currentError => {
const type = currentError.kind === 'directory' ? 'directory' : 'file'
- logger.warn(buildFileOperationDiagnostic({
- code: type === 'file' ? 'CLEANUP_FILE_DELETE_FAILED' : 'CLEANUP_DIRECTORY_DELETE_FAILED',
- title: type === 'file' ? 'Cleanup could not delete a file' : 'Cleanup could not delete a directory',
- operation: 'delete',
- targetKind: type,
- path: currentError.path,
- error: currentError.error,
- details: {
- phase: 'cleanup'
- }
- }))
+ logger.warn(
+ buildFileOperationDiagnostic({
+ code: type === 'file' ? 'CLEANUP_FILE_DELETE_FAILED' : 'CLEANUP_DIRECTORY_DELETE_FAILED',
+ title: type === 'file' ? 'Cleanup could not delete a file' : 'Cleanup could not delete a directory',
+ operation: 'delete',
+ targetKind: type,
+ path: currentError.path,
+ error: currentError.error,
+ details: {
+ phase: 'cleanup'
+ }
+ })
+ )
return {path: currentError.path, type, error: currentError.error}
})
@@ -272,9 +295,7 @@ async function buildCleanupSnapshot(
cleanCtx: OutputCleanContext,
 predeclaredOutputs?: ReadonlyMap<OutputPlugin, readonly OutputFileDeclaration[]>
 ): Promise<NativeCleanupSnapshot> {
- const pluginSnapshots = await Promise.all(
- outputPlugins.map(async plugin => collectPluginCleanupSnapshot(plugin, cleanCtx, predeclaredOutputs))
- )
+ const pluginSnapshots = await Promise.all(outputPlugins.map(async plugin => collectPluginCleanupSnapshot(plugin, cleanCtx, predeclaredOutputs)))
const protectedRules: NativeProtectedRule[] = []
for (const rule of collectProtectedInputSourceRules(cleanCtx.collectedOutputContext)) {
@@ -288,11 +309,9 @@ async function buildCleanupSnapshot(
}
if (cleanCtx.collectedOutputContext.aindexDir != null && cleanCtx.pluginOptions != null) {
- for (const rule of collectConfiguredAindexInputRules(
- cleanCtx.pluginOptions as Required,
- cleanCtx.collectedOutputContext.aindexDir,
- {workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path}
- )) {
+ for (const rule of collectConfiguredAindexInputRules(cleanCtx.pluginOptions as Required, cleanCtx.collectedOutputContext.aindexDir, {
+ workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path
+ })) {
protectedRules.push({
path: rule.path,
protectionMode: mapProtectionMode(rule.protectionMode),
@@ -307,9 +326,7 @@ async function buildCleanupSnapshot(
return {
workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path,
- ...cleanCtx.collectedOutputContext.aindexDir != null
- ? {aindexDir: cleanCtx.collectedOutputContext.aindexDir}
- : {},
+ ...cleanCtx.collectedOutputContext.aindexDir != null ? {aindexDir: cleanCtx.collectedOutputContext.aindexDir} : {},
projectRoots: collectProjectRoots(cleanCtx.collectedOutputContext),
protectedRules,
pluginSnapshots
@@ -320,17 +337,15 @@ function parseNativeJson<T>(json: string): T {
return JSON.parse(json) as T
}
-export async function planCleanupWithNative(
- snapshot: NativeCleanupSnapshot
-): Promise<NativeCleanupPlan> {
+export async function planCleanupWithNative(snapshot: NativeCleanupSnapshot): Promise<NativeCleanupPlan> {
+ const nativeBinding = requireNativeCleanupBinding()
if (nativeBinding?.planCleanup == null) throw new Error('Native cleanup planning is unavailable')
const result = await Promise.resolve(nativeBinding.planCleanup(JSON.stringify(snapshot)))
return parseNativeJson(result)
}
-export async function performCleanupWithNative(
- snapshot: NativeCleanupSnapshot
-): Promise<NativeCleanupResult> {
+export async function performCleanupWithNative(snapshot: NativeCleanupSnapshot): Promise<NativeCleanupResult> {
+ const nativeBinding = requireNativeCleanupBinding()
if (nativeBinding?.performCleanup == null) throw new Error('Native cleanup execution is unavailable')
const result = await Promise.resolve(nativeBinding.performCleanup(JSON.stringify(snapshot)))
return parseNativeJson(result)
@@ -343,14 +358,11 @@ export async function collectDeletionTargets(
): Promise<{
filesToDelete: string[]
dirsToDelete: string[]
+ emptyDirsToDelete: string[]
violations: import('../ProtectedDeletionGuard').ProtectedPathViolation[]
- conflicts: import('./CleanupUtils.fallback').CleanupProtectionConflict[]
+ conflicts: CleanupProtectionConflict[]
excludedScanGlobs: string[]
}> {
- if (!hasNativeCleanupBinding()) {
- return collectDeletionTargetsFallback(outputPlugins, cleanCtx, predeclaredOutputs)
- }
-
const snapshot = await buildCleanupSnapshot(outputPlugins, cleanCtx, predeclaredOutputs)
const plan = await planCleanupWithNative(snapshot)
@@ -360,7 +372,8 @@ export async function collectDeletionTargets(
return {
filesToDelete: plan.filesToDelete,
- dirsToDelete: plan.dirsToDelete,
+ dirsToDelete: plan.dirsToDelete.sort((a, b) => a.localeCompare(b)),
+ emptyDirsToDelete: plan.emptyDirsToDelete.sort((a, b) => a.localeCompare(b)),
violations: [...plan.violations],
conflicts: [],
excludedScanGlobs: plan.excludedScanGlobs
@@ -372,11 +385,7 @@ export async function performCleanup(
cleanCtx: OutputCleanContext,
logger: ILogger,
 predeclaredOutputs?: ReadonlyMap<OutputPlugin, readonly OutputFileDeclaration[]>
-): Promise<CleanupResult> {
- if (!hasNativeCleanupBinding()) {
- return performCleanupFallback(outputPlugins, cleanCtx, logger, predeclaredOutputs)
- }
-
+): Promise<CleanupResult> {
if (predeclaredOutputs != null) {
const outputs = await collectAllPluginOutputs(outputPlugins, cleanCtx, predeclaredOutputs)
logger.debug('Collected outputs for cleanup', {
@@ -418,7 +427,8 @@ export async function performCleanup(
logger.debug('cleanup delete execution started', {
filesToDelete: result.filesToDelete.length,
- dirsToDelete: result.dirsToDelete.length
+ dirsToDelete: result.dirsToDelete.length + result.emptyDirsToDelete.length,
+ emptyDirsToDelete: result.emptyDirsToDelete.length
})
const loggedErrors = logNativeCleanupErrors(logger, result.errors)
logger.debug('cleanup delete execution complete', {
diff --git a/cli/src/commands/ProtectedDeletionCommands.test.ts b/cli/src/commands/ProtectedDeletionCommands.test.ts
index 4904db22..3b431b65 100644
--- a/cli/src/commands/ProtectedDeletionCommands.test.ts
+++ b/cli/src/commands/ProtectedDeletionCommands.test.ts
@@ -229,4 +229,49 @@ describe('protected deletion commands', () => {
writeSpy.mockRestore()
}
})
+
+ it('includes workspace empty directories in clean dry-run results', async () => {
+ const workspaceDir = path.resolve('tmp-workspace-command-dry-run-empty')
+ const generatedDir = path.join(workspaceDir, 'generated')
+ const generatedFile = path.join(generatedDir, 'AGENTS.md')
+ const emptyLeafDir = path.join(workspaceDir, 'scratch', 'empty', 'leaf')
+ const retainedScratchFile = path.join(workspaceDir, 'scratch', 'keep.md')
+ const plugin: OutputPlugin = {
+ type: PluginKind.Output,
+ name: 'DryRunEmptyDirPlugin',
+ log: createMockLogger(),
+ declarativeOutput: true,
+ outputCapabilities: {},
+ async declareOutputFiles() {
+ return [{path: generatedFile, source: {}}]
+ },
+ async declareCleanupPaths() {
+ return {}
+ },
+ async convertContent() {
+ return ''
+ }
+ }
+
+ fs.rmSync(workspaceDir, {recursive: true, force: true})
+ fs.mkdirSync(generatedDir, {recursive: true})
+ fs.mkdirSync(emptyLeafDir, {recursive: true})
+ fs.mkdirSync(path.dirname(retainedScratchFile), {recursive: true})
+ fs.writeFileSync(generatedFile, '# generated', 'utf8')
+ fs.writeFileSync(retainedScratchFile, '# keep', 'utf8')
+
+ try {
+ const ctx = createCommandContext([plugin], workspaceDir)
+ const result = await new DryRunCleanCommand().execute(ctx)
+
+ expect(result).toEqual(expect.objectContaining({
+ success: true,
+ filesAffected: 1,
+ dirsAffected: 3
+ }))
+ }
+ finally {
+ fs.rmSync(workspaceDir, {recursive: true, force: true})
+ }
+ })
})
diff --git a/cli/src/core/cleanup.rs b/cli/src/core/cleanup.rs
index bbe3ec6b..e7d5afb5 100644
--- a/cli/src/core/cleanup.rs
+++ b/cli/src/core/cleanup.rs
@@ -18,6 +18,24 @@ const DEFAULT_CLEANUP_SCAN_EXCLUDE_GLOBS: [&str; 6] = [
"**/.next/**",
];
+const EMPTY_DIRECTORY_SCAN_EXCLUDED_BASENAMES: [&str; 15] = [
+ ".git",
+ "node_modules",
+ "dist",
+ "target",
+ ".next",
+ ".turbo",
+ "coverage",
+ ".nyc_output",
+ ".cache",
+ ".vite",
+ ".vite-temp",
+ ".pnpm-store",
+ ".yarn",
+ ".idea",
+ ".vscode",
+];
+
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum ProtectionModeDto {
@@ -129,6 +147,7 @@ pub struct CleanupProtectionConflictDto {
pub struct CleanupPlan {
pub files_to_delete: Vec<String>,
pub dirs_to_delete: Vec<String>,
+ pub empty_dirs_to_delete: Vec<String>,
pub violations: Vec,
pub conflicts: Vec,
pub excluded_scan_globs: Vec<String>,
@@ -152,6 +171,7 @@ pub struct CleanupExecutionResultDto {
pub conflicts: Vec,
pub files_to_delete: Vec<String>,
pub dirs_to_delete: Vec<String>,
+ pub empty_dirs_to_delete: Vec<String>,
pub excluded_scan_globs: Vec<String>,
}
@@ -830,6 +850,132 @@ fn compact_deletion_targets(files: &[String], dirs: &[String]) -> (Vec,
(compacted_files, compacted_dir_paths)
}
+fn should_skip_empty_directory_tree(workspace_dir: &str, current_dir: &str) -> bool {
+ if current_dir == workspace_dir {
+ return false;
+ }
+
+ Path::new(current_dir)
+ .file_name()
+ .and_then(|value| value.to_str())
+ .is_some_and(|basename| EMPTY_DIRECTORY_SCAN_EXCLUDED_BASENAMES.contains(&basename))
+}
+
+fn collect_empty_workspace_directories(
+ current_dir: &Path,
+ workspace_dir: &str,
+ files_to_delete: &HashSet<String>,
+ dirs_to_delete: &HashSet<String>,
+ empty_dirs_to_delete: &mut BTreeSet<String>,
+) -> bool {
+ let current_dir = normalize_path(current_dir);
+ let current_dir_string = path_to_string(&current_dir);
+
+ if dirs_to_delete.contains(&current_dir_string) {
+ return true;
+ }
+
+ if should_skip_empty_directory_tree(workspace_dir, &current_dir_string) {
+ return false;
+ }
+
+ let Ok(entries) = fs::read_dir(&current_dir) else {
+ return false;
+ };
+
+ let mut has_retained_entries = false;
+
+ for entry in entries {
+ let Ok(entry) = entry else {
+ has_retained_entries = true;
+ continue;
+ };
+
+ let entry_path = normalize_path(&entry.path());
+ let entry_string = path_to_string(&entry_path);
+
+ if dirs_to_delete.contains(&entry_string) {
+ continue;
+ }
+
+ let Ok(file_type) = entry.file_type() else {
+ has_retained_entries = true;
+ continue;
+ };
+
+ if file_type.is_dir() {
+ if should_skip_empty_directory_tree(workspace_dir, &entry_string) {
+ has_retained_entries = true;
+ continue;
+ }
+
+ if collect_empty_workspace_directories(
+ &entry_path,
+ workspace_dir,
+ files_to_delete,
+ dirs_to_delete,
+ empty_dirs_to_delete,
+ ) {
+ empty_dirs_to_delete.insert(entry_string);
+ continue;
+ }
+
+ has_retained_entries = true;
+ continue;
+ }
+
+ if files_to_delete.contains(&entry_string) {
+ continue;
+ }
+
+ has_retained_entries = true;
+ }
+
+ !has_retained_entries
+}
+
+fn plan_workspace_empty_directory_cleanup(
+ workspace_dir: &str,
+ files_to_delete: &[String],
+ dirs_to_delete: &[String],
+ guard: &ProtectedDeletionGuard,
+) -> (Vec<String>, Vec<ProtectedPathViolation>) {
+ let workspace_dir = path_to_string(&resolve_absolute_path(workspace_dir));
+ let files_to_delete = files_to_delete
+ .iter()
+ .map(|path| path_to_string(&resolve_absolute_path(path)))
+ .collect::<HashSet<_>>();
+ let dirs_to_delete = dirs_to_delete
+ .iter()
+ .map(|path| path_to_string(&resolve_absolute_path(path)))
+ .collect::<HashSet<_>>();
+ let mut discovered_empty_dirs = BTreeSet::new();
+
+ collect_empty_workspace_directories(
+ Path::new(&workspace_dir),
+ &workspace_dir,
+ &files_to_delete,
+ &dirs_to_delete,
+ &mut discovered_empty_dirs,
+ );
+
+ let mut safe_empty_dirs = Vec::new();
+ let mut violations = Vec::new();
+
+ for empty_dir in discovered_empty_dirs {
+ if let Some(violation) = get_protected_path_violation(&empty_dir, guard) {
+ violations.push(violation);
+ } else {
+ safe_empty_dirs.push(empty_dir);
+ }
+ }
+
+ safe_empty_dirs.sort();
+ violations.sort_by(|a, b| a.target_path.cmp(&b.target_path));
+
+ (safe_empty_dirs, violations)
+}
+
fn detect_cleanup_protection_conflicts(
output_path_owners: &HashMap>,
guard: &ProtectedDeletionGuard,
@@ -994,6 +1140,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result {
return Ok(CleanupPlan {
files_to_delete: Vec::new(),
dirs_to_delete: Vec::new(),
+ empty_dirs_to_delete: Vec::new(),
violations: Vec::new(),
conflicts,
excluded_scan_globs: ignore_globs,
@@ -1006,14 +1153,22 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result {
partition_deletion_targets(&delete_dirs.into_iter().collect::<Vec<_>>(), &guard);
let (files_to_delete, dirs_to_delete) =
compact_deletion_targets(&file_partition.safe_paths, &dir_partition.safe_paths);
+ let (empty_dirs_to_delete, empty_dir_violations) = plan_workspace_empty_directory_cleanup(
+ &snapshot.workspace_dir,
+ &files_to_delete,
+ &dirs_to_delete,
+ &guard,
+ );
let mut violations = file_partition.violations;
violations.extend(dir_partition.violations);
+ violations.extend(empty_dir_violations);
violations.sort_by(|a, b| a.target_path.cmp(&b.target_path));
Ok(CleanupPlan {
files_to_delete,
dirs_to_delete,
+ empty_dirs_to_delete,
violations,
conflicts: Vec::new(),
excluded_scan_globs: ignore_globs,
@@ -1031,11 +1186,13 @@ pub fn perform_cleanup(snapshot: CleanupSnapshot) -> Result Result void
+ readonly error: (diagnostic: object) => void
+}
+
+export interface SafeWriteOptions {
+ readonly fullPath: string
+ readonly content: string | Buffer
+ readonly type: string
+ readonly relativePath: string
+ readonly dryRun: boolean
+ readonly logger: WriteLogger
+}
+
+export interface SafeWriteResult {
+ readonly path: string
+ readonly success: boolean
+ readonly skipped?: boolean
+ readonly error?: Error
+}
interface NativeDeskPathsBinding {
readonly getPlatformFixedDir?: () => string
@@ -27,6 +50,7 @@ interface NativeDeskPathsBinding {
readonly readFileSync?: (filePath: string, encoding?: BufferEncoding) => string
readonly deleteFiles?: (files: readonly string[]) => DeletionResult | Promise<DeletionResult>
readonly deleteDirectories?: (dirs: readonly string[]) => DeletionResult | Promise<DeletionResult>
+ readonly deleteEmptyDirectories?: (dirs: readonly string[]) => DeletionResult | Promise<DeletionResult>
readonly deleteTargets?: (targets: {readonly files?: readonly string[], readonly dirs?: readonly string[]}) => DeleteTargetsResult | Promise<DeleteTargetsResult>
}
@@ -37,11 +61,28 @@ type NativeDeletionResult = DeletionResult & {
type NativeDeleteTargetsResult = DeleteTargetsResult & {
readonly deleted_files?: readonly string[]
readonly deleted_dirs?: readonly string[]
- readonly file_errors?: readonly import('./desk-paths-fallback').DeletionError[]
- readonly dir_errors?: readonly import('./desk-paths-fallback').DeletionError[]
+ readonly file_errors?: readonly DeletionError[]
+ readonly dir_errors?: readonly DeletionError[]
}
-const nativeBinding = getNativeBinding()
+function requireNativeDeskPathsBinding(): NativeDeskPathsBinding {
+ const binding = getNativeBinding()
+ if (binding == null) {
+ throw new Error('Native desk-paths binding is required. Build or install the Rust NAPI package before running tnmsc.')
+ }
+ return binding
+}
+
+function requireDeskPathsMethod<K extends keyof NativeDeskPathsBinding>(
+ methodName: K
+): NonNullable<NativeDeskPathsBinding[K]> {
+ const binding = requireNativeDeskPathsBinding()
+ const method = binding[methodName]
+ if (method == null) {
+ throw new Error(`Native desk-paths binding is missing "${String(methodName)}". Rebuild the Rust NAPI package before running tnmsc.`)
+ }
+ return method
+}
function normalizeDeletionResult(result: NativeDeletionResult): DeletionResult {
return {
@@ -61,62 +102,49 @@ function normalizeDeleteTargetsResult(result: NativeDeleteTargetsResult): Delete
}
export function getPlatformFixedDir(): string {
- return fallback.getPlatformFixedDir()
+ return requireDeskPathsMethod('getPlatformFixedDir')()
}
export function ensureDir(dir: string): void {
- if (nativeBinding?.ensureDir != null) {
- nativeBinding.ensureDir(dir)
- return
- }
- fallback.ensureDir(dir)
+ requireDeskPathsMethod('ensureDir')(dir)
}
export function existsSync(targetPath: string): boolean {
- return nativeBinding?.existsSync?.(targetPath) ?? fallback.existsSync(targetPath)
+ return requireDeskPathsMethod('existsSync')(targetPath)
}
export function deletePathSync(targetPath: string): void {
- if (nativeBinding?.deletePathSync != null) {
- nativeBinding.deletePathSync(targetPath)
- return
- }
- fallback.deletePathSync(targetPath)
+ requireDeskPathsMethod('deletePathSync')(targetPath)
}
export function writeFileSync(filePath: string, data: string | Buffer, encoding: BufferEncoding = 'utf8'): void {
- if (nativeBinding?.writeFileSync != null) {
- nativeBinding.writeFileSync(filePath, data, encoding)
- return
- }
- fallback.writeFileSync(filePath, data, encoding)
+ requireDeskPathsMethod('writeFileSync')(filePath, data, encoding)
}
export function readFileSync(filePath: string, encoding: BufferEncoding = 'utf8'): string {
- return nativeBinding?.readFileSync?.(filePath, encoding) ?? fallback.readFileSync(filePath, encoding)
+ return requireDeskPathsMethod('readFileSync')(filePath, encoding)
}
export async function deleteFiles(files: readonly string[]): Promise<DeletionResult> {
- if (nativeBinding?.deleteFiles != null) return normalizeDeletionResult(await Promise.resolve(nativeBinding.deleteFiles(files) as NativeDeletionResult))
- return fallback.deleteFiles(files)
+ return normalizeDeletionResult(await Promise.resolve(requireDeskPathsMethod('deleteFiles')(files) as NativeDeletionResult))
}
export async function deleteDirectories(dirs: readonly string[]): Promise<DeletionResult> {
- if (nativeBinding?.deleteDirectories != null) return normalizeDeletionResult(await Promise.resolve(nativeBinding.deleteDirectories(dirs) as NativeDeletionResult))
- return fallback.deleteDirectories(dirs)
+ return normalizeDeletionResult(await Promise.resolve(requireDeskPathsMethod('deleteDirectories')(dirs) as NativeDeletionResult))
+}
+
+export async function deleteEmptyDirectories(dirs: readonly string[]): Promise<DeletionResult> {
+ return normalizeDeletionResult(await Promise.resolve(requireDeskPathsMethod('deleteEmptyDirectories')(dirs) as NativeDeletionResult))
}
export async function deleteTargets(targets: {
readonly files?: readonly string[]
readonly dirs?: readonly string[]
}): Promise<DeleteTargetsResult> {
- if (nativeBinding?.deleteTargets != null) {
- return normalizeDeleteTargetsResult(await Promise.resolve(nativeBinding.deleteTargets({
- files: targets.files ?? [],
- dirs: targets.dirs ?? []
- }) as NativeDeleteTargetsResult))
- }
- return fallback.deleteTargets(targets)
+ return normalizeDeleteTargetsResult(await Promise.resolve(requireDeskPathsMethod('deleteTargets')({
+ files: targets.files ?? [],
+ dirs: targets.dirs ?? []
+ }) as NativeDeleteTargetsResult))
}
export function writeFileSafe(options: SafeWriteOptions): SafeWriteResult {
diff --git a/cli/src/core/desk_paths.rs b/cli/src/core/desk_paths.rs
index 34c9f530..6b6d72f0 100644
--- a/cli/src/core/desk_paths.rs
+++ b/cli/src/core/desk_paths.rs
@@ -283,6 +283,30 @@ pub struct DeleteTargetsResult {
pub dir_errors: Vec,
}
+fn delete_empty_directory(path: impl AsRef<Path>) -> io::Result<bool> {
+ let path = path.as_ref();
+ let metadata = match fs::symlink_metadata(path) {
+ Ok(metadata) => metadata,
+ Err(err) if err.kind() == io::ErrorKind::NotFound => return Ok(false),
+ Err(err) => return Err(err),
+ };
+
+ if metadata.file_type().is_symlink() || !metadata.is_dir() {
+ return Ok(false);
+ }
+
+ match fs::remove_dir(path) {
+ Ok(()) => Ok(true),
+ Err(err)
+ if err.kind() == io::ErrorKind::NotFound
+ || err.kind() == io::ErrorKind::DirectoryNotEmpty =>
+ {
+ Ok(false)
+ }
+ Err(err) => Err(err),
+ }
+}
+
pub fn delete_files(paths: &[String]) -> DeletionResult {
let mut result = DeletionResult {
deleted: 0,
@@ -330,6 +354,31 @@ pub fn delete_directories(paths: &[String]) -> DeletionResult {
result
}
+pub fn delete_empty_directories(paths: &[String]) -> DeletionResult {
+ let mut sorted_paths = paths.to_vec();
+ sorted_paths.sort_by(|a, b| b.len().cmp(&a.len()).then_with(|| b.cmp(a)));
+
+ let mut result = DeletionResult {
+ deleted: 0,
+ deleted_paths: Vec::new(),
+ errors: Vec::new(),
+ };
+ for path in &sorted_paths {
+ match delete_empty_directory(Path::new(path)) {
+ Ok(true) => {
+ result.deleted += 1;
+ result.deleted_paths.push(path.clone());
+ }
+ Ok(false) => {}
+ Err(err) => result.errors.push(DeletionError {
+ path: path.clone(),
+ error: err.to_string(),
+ }),
+ }
+ }
+ result
+}
+
pub fn delete_targets(files: &[String], dirs: &[String]) -> DeleteTargetsResult {
let file_result = delete_files(files);
let dir_result = delete_directories(dirs);
@@ -457,6 +506,16 @@ mod napi_binding {
}
}
+ #[napi]
+ pub fn delete_empty_directories(paths: Vec<String>) -> NapiDeletionResult {
+ let result = super::delete_empty_directories(&paths);
+ NapiDeletionResult {
+ deleted: result.deleted as u32,
+ deletedPaths: result.deleted_paths,
+ errors: result.errors.into_iter().map(to_napi_error).collect(),
+ }
+ }
+
#[napi(object)]
pub struct DeleteTargetsInput {
pub files: Option<Vec<String>>,
@@ -512,4 +571,51 @@ mod tests {
assert!(result.file_errors.is_empty());
assert!(result.dir_errors.is_empty());
}
+
+ #[test]
+ fn delete_empty_directories_only_removes_empty_paths() {
+ let dir = tempdir().unwrap();
+ let parent_dir = dir.path().join("empty-parent");
+ let child_dir = parent_dir.join("leaf");
+ let non_empty_dir = dir.path().join("non-empty");
+ fs::create_dir_all(&child_dir).unwrap();
+ fs::create_dir_all(&non_empty_dir).unwrap();
+ fs::write(non_empty_dir.join("keep.txt"), b"keep").unwrap();
+
+ let result = delete_empty_directories(&[
+ parent_dir.to_string_lossy().into_owned(),
+ child_dir.to_string_lossy().into_owned(),
+ non_empty_dir.to_string_lossy().into_owned(),
+ ]);
+
+ assert_eq!(result.deleted, 2);
+ assert_eq!(
+ result.deleted_paths,
+ vec![
+ child_dir.to_string_lossy().into_owned(),
+ parent_dir.to_string_lossy().into_owned(),
+ ]
+ );
+ assert!(result.errors.is_empty());
+ assert!(!parent_dir.exists());
+ assert!(non_empty_dir.exists());
+ }
+
+ #[test]
+ fn delete_empty_directories_skips_non_empty_and_missing_paths() {
+ let dir = tempdir().unwrap();
+ let target_dir = dir.path().join("maybe-empty");
+ fs::create_dir_all(&target_dir).unwrap();
+ fs::write(target_dir.join("new-file.txt"), b"late write").unwrap();
+
+ let result = delete_empty_directories(&[
+ target_dir.to_string_lossy().into_owned(),
+ dir.path().join("missing").to_string_lossy().into_owned(),
+ ]);
+
+ assert_eq!(result.deleted, 0);
+ assert!(result.deleted_paths.is_empty());
+ assert!(result.errors.is_empty());
+ assert!(target_dir.exists());
+ }
}
diff --git a/cli/src/core/native-binding.ts b/cli/src/core/native-binding.ts
index 4ea16586..0ab5605d 100644
--- a/cli/src/core/native-binding.ts
+++ b/cli/src/core/native-binding.ts
@@ -1,6 +1,12 @@
import {createRequire} from 'node:module'
import process from 'node:process'
+declare global {
+ interface GlobalThis {
+ __TNMSC_TEST_NATIVE_BINDING__?: object
+ }
+}
+
function shouldSkipNativeBinding(): boolean {
if (process.env['TNMSC_FORCE_NATIVE_BINDING'] === '1') return false
if (process.env['TNMSC_DISABLE_NATIVE_BINDING'] === '1') return true
@@ -11,6 +17,8 @@ function shouldSkipNativeBinding(): boolean {
}
export function tryLoadNativeBinding<T>(): T | undefined {
+ const testBinding: unknown = globalThis.__TNMSC_TEST_NATIVE_BINDING__
+ if (testBinding != null && typeof testBinding === 'object') return testBinding as T
if (shouldSkipNativeBinding()) return void 0
const suffixMap: Readonly<Record<string, string>> = {
diff --git a/cli/src/plugins/desk-paths.test.ts b/cli/src/plugins/desk-paths.test.ts
index 17786de9..a266f9e5 100644
--- a/cli/src/plugins/desk-paths.test.ts
+++ b/cli/src/plugins/desk-paths.test.ts
@@ -3,68 +3,32 @@ import * as os from 'node:os'
import * as path from 'node:path'
import {afterEach, describe, expect, it, vi} from 'vitest'
-import {deleteFiles, deleteTargets, getPlatformFixedDir} from '../core/desk-paths'
-
-const {resolveRuntimeEnvironmentMock, resolveUserPathMock} = vi.hoisted(() => ({
- resolveRuntimeEnvironmentMock: vi.fn(),
- resolveUserPathMock: vi.fn((value: string) => value)
-}))
-
-vi.mock('@/runtime-environment', async importActual => {
- const actual = await importActual()
- return {
- ...actual,
- resolveRuntimeEnvironment: resolveRuntimeEnvironmentMock,
- resolveUserPath: resolveUserPathMock
- }
-})
+import {deleteEmptyDirectories, deleteFiles, deleteTargets, getPlatformFixedDir} from '../core/desk-paths'
-const originalXdgDataHome = process.env['XDG_DATA_HOME']
-const originalLocalAppData = process.env['LOCALAPPDATA']
+const defaultNativeBinding = globalThis.__TNMSC_TEST_NATIVE_BINDING__
describe('desk paths', () => {
afterEach(() => {
vi.restoreAllMocks()
vi.clearAllMocks()
-
- if (originalXdgDataHome == null) delete process.env['XDG_DATA_HOME']
- else process.env['XDG_DATA_HOME'] = originalXdgDataHome
- if (originalLocalAppData == null) delete process.env['LOCALAPPDATA']
- else process.env['LOCALAPPDATA'] = originalLocalAppData
+ globalThis.__TNMSC_TEST_NATIVE_BINDING__ = defaultNativeBinding
})
- it('uses linux data paths outside WSL', () => {
- delete process.env['XDG_DATA_HOME']
- resolveRuntimeEnvironmentMock.mockReturnValue({
- platform: 'linux',
- isWsl: false,
- nativeHomeDir: '/home/alpha',
- effectiveHomeDir: '/home/alpha',
- globalConfigCandidates: [],
- windowsUsersRoot: '/mnt/c/Users',
- expandedEnv: {}
- })
-
- expect(getPlatformFixedDir().replaceAll('\\', '/')).toBe(path.join('/home/alpha', '.local', 'share').replaceAll('\\', '/'))
+ it('delegates getPlatformFixedDir to the native binding', () => {
+ const getPlatformFixedDirMock = vi.fn(() => '/tmp/native-fixed-dir')
+ globalThis.__TNMSC_TEST_NATIVE_BINDING__ = {
+ ...defaultNativeBinding,
+ getPlatformFixedDir: getPlatformFixedDirMock
+ }
+
+ expect(getPlatformFixedDir()).toBe('/tmp/native-fixed-dir')
+ expect(getPlatformFixedDirMock).toHaveBeenCalledOnce()
})
- it('uses Windows fixed-dir semantics when WSL targets the host home', () => {
- process.env['LOCALAPPDATA'] = 'C:\\Users\\alpha\\AppData\\Local'
- resolveRuntimeEnvironmentMock.mockReturnValue({
- platform: 'linux',
- isWsl: true,
- nativeHomeDir: '/home/alpha',
- effectiveHomeDir: '/mnt/c/Users/alpha',
- globalConfigCandidates: ['/mnt/c/Users/alpha/.aindex/.tnmsc.json'],
- selectedGlobalConfigPath: '/mnt/c/Users/alpha/.aindex/.tnmsc.json',
- wslHostHomeDir: '/mnt/c/Users/alpha',
- windowsUsersRoot: '/mnt/c/Users',
- expandedEnv: {}
- })
- resolveUserPathMock.mockReturnValue('/mnt/c/Users/alpha/AppData/Local')
-
- expect(getPlatformFixedDir()).toBe('/mnt/c/Users/alpha/AppData/Local')
- expect(resolveUserPathMock).toHaveBeenCalledWith('C:\\Users\\alpha\\AppData\\Local')
+ it('throws when the native desk-paths binding is unavailable', () => {
+ globalThis.__TNMSC_TEST_NATIVE_BINDING__ = void 0
+
+ expect(() => getPlatformFixedDir()).toThrow('Native desk-paths binding is required')
})
it('deletes mixed file and directory targets in one batch', async () => {
@@ -130,4 +94,48 @@ describe('desk paths', () => {
fs.rmSync(tempDir, {recursive: true, force: true})
}
})
+
+ it('deletes only empty directories from deepest to shallowest', async () => {
+ const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-desk-paths-empty-dirs-'))
+ const parentDir = path.join(tempDir, 'empty-parent')
+ const childDir = path.join(parentDir, 'leaf')
+ const nonEmptyDir = path.join(tempDir, 'non-empty')
+
+ try {
+ fs.mkdirSync(childDir, {recursive: true})
+ fs.mkdirSync(nonEmptyDir, {recursive: true})
+ fs.writeFileSync(path.join(nonEmptyDir, 'keep.txt'), 'keep', 'utf8')
+
+ const result = await deleteEmptyDirectories([parentDir, childDir, nonEmptyDir])
+
+ expect(result.deleted).toBe(2)
+ expect(result.deletedPaths).toEqual([childDir, parentDir])
+ expect(result.errors).toEqual([])
+ expect(fs.existsSync(parentDir)).toBe(false)
+ expect(fs.existsSync(nonEmptyDir)).toBe(true)
+ }
+ finally {
+ fs.rmSync(tempDir, {recursive: true, force: true})
+ }
+ })
+
+ it('skips directories that become non-empty before empty-directory deletion runs', async () => {
+ const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-desk-paths-empty-race-'))
+ const targetDir = path.join(tempDir, 'maybe-empty')
+
+ try {
+ fs.mkdirSync(targetDir, {recursive: true})
+ fs.writeFileSync(path.join(targetDir, 'new-file.txt'), 'late write', 'utf8')
+
+ const result = await deleteEmptyDirectories([targetDir, path.join(tempDir, 'missing')])
+
+ expect(result.deleted).toBe(0)
+ expect(result.deletedPaths).toEqual([])
+ expect(result.errors).toEqual([])
+ expect(fs.existsSync(targetDir)).toBe(true)
+ }
+ finally {
+ fs.rmSync(tempDir, {recursive: true, force: true})
+ }
+ })
})
diff --git a/cli/src/plugins/plugin-core/filters.ts b/cli/src/plugins/plugin-core/filters.ts
index 472e7717..dbbf0f7e 100644
--- a/cli/src/plugins/plugin-core/filters.ts
+++ b/cli/src/plugins/plugin-core/filters.ts
@@ -4,98 +4,42 @@ import type {
SeriName
} from './types'
import * as fs from 'node:fs'
-import {createRequire} from 'node:module'
import * as path from 'node:path'
-import process from 'node:process'
-
-/** Core series filtering helpers. Delegates to the unified CLI Rust NAPI when available, falls back to pure-TS implementations otherwise. */
-function resolveEffectiveIncludeSeriesTS(topLevel?: readonly string[], typeSpecific?: readonly string[]): string[] {
- if (topLevel == null && typeSpecific == null) return []
- return [...new Set([...topLevel ?? [], ...typeSpecific ?? []])]
-}
-
-function matchesSeriesTS(seriName: string | readonly string[] | null | undefined, effectiveIncludeSeries: readonly string[]): boolean {
- if (seriName == null) return true
- if (effectiveIncludeSeries.length === 0) return true
- if (typeof seriName === 'string') return effectiveIncludeSeries.includes(seriName)
- return seriName.some(name => effectiveIncludeSeries.includes(name))
-}
-
-function resolveSubSeriesTS(
- topLevel?: Readonly<Record<string, readonly string[]>>,
- typeSpecific?: Readonly<Record<string, readonly string[]>>
-): Record<string, string[]> {
- if (topLevel == null && typeSpecific == null) return {}
- const merged: Record<string, string[]> = {}
- for (const [key, values] of Object.entries(topLevel ?? {})) merged[key] = [...values]
- for (const [key, values] of Object.entries(typeSpecific ?? {})) {
- const existingValues = merged[key] ?? []
- merged[key] = Object.hasOwn(merged, key) ? [...new Set([...existingValues, ...values])] : [...values]
- }
- return merged
-}
+import {getNativeBinding} from '@/core/native-binding'
interface SeriesFilterFns {
- resolveEffectiveIncludeSeries: typeof resolveEffectiveIncludeSeriesTS
- matchesSeries: typeof matchesSeriesTS
- resolveSubSeries: typeof resolveSubSeriesTS
-}
-
-function isSeriesFilterFns(candidate: unknown): candidate is SeriesFilterFns {
- if (candidate == null || typeof candidate !== 'object') return false
- const c = candidate as Record<string, unknown>
- return typeof c['matchesSeries'] === 'function'
- && typeof c['resolveEffectiveIncludeSeries'] === 'function'
- && typeof c['resolveSubSeries'] === 'function'
+ readonly resolveEffectiveIncludeSeries: (
+ topLevel?: readonly string[],
+ typeSpecific?: readonly string[]
+ ) => string[]
+ readonly matchesSeries: (
+ seriName: string | readonly string[] | null | undefined,
+ effectiveIncludeSeries: readonly string[]
+ ) => boolean
+ readonly resolveSubSeries: (
+ topLevel?: Readonly<Record<string, readonly string[]>>,
+ typeSpecific?: Readonly<Record<string, readonly string[]>>
+ ) => Record<string, string[]>
}
-function tryLoadNapi(): SeriesFilterFns | undefined {
- const suffixMap: Record<string, string> = {
- 'win32-x64': 'win32-x64-msvc',
- 'linux-x64': 'linux-x64-gnu',
- 'linux-arm64': 'linux-arm64-gnu',
- 'darwin-arm64': 'darwin-arm64',
- 'darwin-x64': 'darwin-x64'
+function requireSeriesFilterFns(): SeriesFilterFns {
+ const candidate = getNativeBinding()
+ if (candidate == null) {
+ throw new TypeError('Native series-filter binding is required. Build or install the Rust NAPI package before running tnmsc.')
}
- const suffix = suffixMap[`${process.platform}-${process.arch}`]
- if (suffix == null) return void 0
-
- const packageName = `@truenine/memory-sync-cli-${suffix}`
- const binaryFile = `napi-memory-sync-cli.${suffix}.node`
-
- try {
- const _require = createRequire(import.meta.url)
- const candidates = [
- packageName,
- `${packageName}/${binaryFile}`,
- `./${binaryFile}`
- ]
-
- for (const specifier of candidates) {
- try {
- const loaded = _require(specifier) as unknown
- const possible = [loaded, (loaded as {default?: unknown})?.default, (loaded as {config?: unknown})?.config]
- for (const candidate of possible) {
- if (isSeriesFilterFns(candidate)) return candidate
- }
- }
- catch {}
- }
+ if (typeof candidate.matchesSeries !== 'function'
+ || typeof candidate.resolveEffectiveIncludeSeries !== 'function'
+ || typeof candidate.resolveSubSeries !== 'function') {
+ throw new TypeError('Native series-filter binding is incomplete. Rebuild the Rust NAPI package before running tnmsc.')
}
- catch {
- } // NAPI unavailable — pure-TS fallback will be used.
- return void 0
+ return candidate
}
const {
resolveEffectiveIncludeSeries,
matchesSeries,
resolveSubSeries
-}: SeriesFilterFns = tryLoadNapi() ?? {
- resolveEffectiveIncludeSeries: resolveEffectiveIncludeSeriesTS,
- matchesSeries: matchesSeriesTS,
- resolveSubSeries: resolveSubSeriesTS
-}
+}: SeriesFilterFns = requireSeriesFilterFns()
/**
* Interface for items that can be filtered by series name
diff --git a/cli/test/native-binding/cleanup.ts b/cli/test/native-binding/cleanup.ts
new file mode 100644
index 00000000..c7aa3c7a
--- /dev/null
+++ b/cli/test/native-binding/cleanup.ts
@@ -0,0 +1,499 @@
+import type { DeletionError } from "./desk-paths";
+import type {
+ ILogger,
+ OutputCleanContext,
+ OutputCleanupDeclarations,
+ OutputCleanupPathDeclaration,
+ OutputFileDeclaration,
+ OutputPlugin,
+ PluginOptions,
+} from "../../src/plugins/plugin-core";
+import type { ProtectedPathRule, ProtectionMode, ProtectionRuleMatcher } from "../../src/ProtectedDeletionGuard";
+import * as fs from "node:fs";
+import * as path from "node:path";
+import glob from "fast-glob";
+import { buildDiagnostic, buildFileOperationDiagnostic, diagnosticLines } from "@/diagnostics";
+import { compactDeletionTargets } from "../../src/cleanup/delete-targets";
+import { planWorkspaceEmptyDirectoryCleanup } from "../../src/cleanup/empty-directories";
+import { deleteEmptyDirectories, deleteTargets as deskDeleteTargets } from "./desk-paths";
+import { collectAllPluginOutputs } from "../../src/plugins/plugin-core";
+import {
+ buildComparisonKeys,
+ collectConfiguredAindexInputRules,
+ collectProjectRoots,
+ collectProtectedInputSourceRules,
+ createProtectedDeletionGuard,
+ logProtectedDeletionGuardError,
+ partitionDeletionTargets,
+ resolveAbsolutePath,
+} from "../../src/ProtectedDeletionGuard";
+
+/**
+ * Result of cleanup operation
+ */
+export interface CleanupResult {
+ readonly deletedFiles: number;
+ readonly deletedDirs: number;
+ readonly errors: readonly CleanupError[];
+ readonly violations: readonly import("../../src/ProtectedDeletionGuard").ProtectedPathViolation[];
+ readonly conflicts: readonly CleanupProtectionConflict[];
+ readonly message?: string;
+}
+
+/**
+ * Error during cleanup operation
+ */
+export interface CleanupError {
+ readonly path: string;
+ readonly type: "file" | "directory";
+ readonly error: unknown;
+}
+
+export interface CleanupProtectionConflict {
+ readonly outputPath: string;
+ readonly outputPlugin: string;
+ readonly protectedPath: string;
+ readonly protectionMode: ProtectionMode;
+ readonly protectedBy: string;
+ readonly reason: string;
+}
+
+export class CleanupProtectionConflictError extends Error {
+ readonly conflicts: readonly CleanupProtectionConflict[];
+
+ constructor(conflicts: readonly CleanupProtectionConflict[]) {
+ super(buildCleanupProtectionConflictMessage(conflicts));
+ this.name = "CleanupProtectionConflictError";
+ this.conflicts = conflicts;
+ }
+}
+
+interface CleanupTargetCollections {
+ readonly filesToDelete: string[];
+ readonly dirsToDelete: string[];
+ readonly emptyDirsToDelete: string[];
+ readonly violations: readonly import("../../src/ProtectedDeletionGuard").ProtectedPathViolation[];
+ readonly conflicts: readonly CleanupProtectionConflict[];
+ readonly excludedScanGlobs: string[];
+}
+
+const DEFAULT_CLEANUP_SCAN_EXCLUDE_GLOBS = ["**/node_modules/**", "**/.git/**", "**/.turbo/**", "**/.pnpm-store/**", "**/.yarn/**", "**/.next/**"] as const;
+
+function normalizeGlobPattern(pattern: string): string {
+ return resolveAbsolutePath(pattern).replaceAll("\\", "/");
+}
+
+function expandCleanupGlob(pattern: string, ignoreGlobs: readonly string[]): readonly string[] {
+ const normalizedPattern = normalizeGlobPattern(pattern);
+ return glob.sync(normalizedPattern, {
+ onlyFiles: false,
+ dot: true,
+ absolute: true,
+ followSymbolicLinks: false,
+ ignore: [...ignoreGlobs],
+ });
+}
+
+function shouldExcludeCleanupMatch(matchedPath: string, target: OutputCleanupPathDeclaration): boolean {
+ if (target.excludeBasenames == null || target.excludeBasenames.length === 0) return false;
+ const basename = path.basename(matchedPath);
+ return target.excludeBasenames.includes(basename);
+}
+
+async function collectPluginCleanupDeclarations(plugin: OutputPlugin, cleanCtx: OutputCleanContext): Promise<OutputCleanupDeclarations> {
+ if (plugin.declareCleanupPaths == null) return {};
+ return plugin.declareCleanupPaths({ ...cleanCtx, dryRun: true });
+}
+
+async function collectPluginCleanupSnapshot(
+  plugin: OutputPlugin,
+  cleanCtx: OutputCleanContext,
+  predeclaredOutputs?: ReadonlyMap<OutputPlugin, readonly OutputFileDeclaration[]>,
+): Promise<{
+  readonly plugin: OutputPlugin;
+  readonly outputs: Awaited<ReturnType<OutputPlugin["declareOutputFiles"]>>;
+  readonly cleanup: OutputCleanupDeclarations;
+}> {
+ const existingOutputDeclarations = predeclaredOutputs?.get(plugin);
+ const [outputs, cleanup] = await Promise.all([
+ existingOutputDeclarations != null ? Promise.resolve(existingOutputDeclarations) : plugin.declareOutputFiles({ ...cleanCtx, dryRun: true }),
+ collectPluginCleanupDeclarations(plugin, cleanCtx),
+ ]);
+
+ return { plugin, outputs, cleanup };
+}
+
+function buildCleanupProtectionConflictMessage(conflicts: readonly CleanupProtectionConflict[]): string {
+ const pathList = conflicts.map((conflict) => conflict.outputPath).join(", ");
+ return `Cleanup protection conflict: ${conflicts.length} output path(s) are also protected: ${pathList}`;
+}
+
+function detectCleanupProtectionConflicts(
+  outputPathOwners: ReadonlyMap<string, string[]>,
+  guard: ReturnType<typeof createProtectedDeletionGuard>,
+): CleanupProtectionConflict[] {
+ const conflicts: CleanupProtectionConflict[] = [];
+
+ for (const [outputPath, outputPlugins] of outputPathOwners.entries()) {
+ const outputKeys = new Set(buildComparisonKeys(outputPath));
+
+ for (const rule of guard.compiledRules) {
+ const isExactMatch = rule.comparisonKeys.some((ruleKey) => outputKeys.has(ruleKey));
+ if (!isExactMatch) continue;
+
+ for (const outputPlugin of outputPlugins) {
+ conflicts.push({
+ outputPath,
+ outputPlugin,
+ protectedPath: rule.path,
+ protectionMode: rule.protectionMode,
+ protectedBy: rule.source,
+ reason: rule.reason,
+ });
+ }
+ }
+ }
+
+ return conflicts.sort((a, b) => {
+ const pathDiff = a.outputPath.localeCompare(b.outputPath);
+ if (pathDiff !== 0) return pathDiff;
+ return a.protectedPath.localeCompare(b.protectedPath);
+ });
+}
+
+function logCleanupProtectionConflicts(logger: ILogger, conflicts: readonly CleanupProtectionConflict[]): void {
+ const firstConflict = conflicts[0];
+
+ logger.error(
+ buildDiagnostic({
+ code: "CLEANUP_PROTECTION_CONFLICT_DETECTED",
+ title: "Cleanup output paths conflict with protected inputs",
+ rootCause: diagnosticLines(
+ `tnmsc found ${conflicts.length} output path(s) that also match protected cleanup rules.`,
+ firstConflict == null
+ ? "No conflict details were captured."
+ : `Example conflict: "${firstConflict.outputPath}" is protected by "${firstConflict.protectedPath}".`,
+ ),
+ exactFix: diagnosticLines("Separate generated output paths from protected source or reserved workspace paths before running cleanup again."),
+ possibleFixes: [
+ diagnosticLines("Update cleanup protect declarations so they do not overlap generated outputs."),
+ diagnosticLines("Move the conflicting output target to a generated-only directory."),
+ ],
+ details: {
+ count: conflicts.length,
+ conflicts: conflicts.map((conflict) => ({
+ outputPath: conflict.outputPath,
+ outputPlugin: conflict.outputPlugin,
+ protectedPath: conflict.protectedPath,
+ protectionMode: conflict.protectionMode,
+ protectedBy: conflict.protectedBy,
+ reason: conflict.reason,
+ })),
+ },
+ }),
+ );
+}
+
+/**
+ * Collect deletion targets from enabled output plugins.
+ */
+async function collectCleanupTargets(
+  outputPlugins: readonly OutputPlugin[],
+  cleanCtx: OutputCleanContext,
+  predeclaredOutputs?: ReadonlyMap<OutputPlugin, readonly OutputFileDeclaration[]>,
+): Promise<CleanupTargetCollections> {
+  const deleteFiles = new Set<string>();
+  const deleteDirs = new Set<string>();
+  const protectedRules = new Map<string, ProtectedPathRule>();
+  const excludeScanGlobSet = new Set<string>(DEFAULT_CLEANUP_SCAN_EXCLUDE_GLOBS);
+  const outputPathOwners = new Map<string, string[]>();
+
+ const pluginSnapshots = await Promise.all(outputPlugins.map(async (plugin) => collectPluginCleanupSnapshot(plugin, cleanCtx, predeclaredOutputs)));
+
+ const addDeletePath = (rawPath: string, kind: "file" | "directory"): void => {
+ if (kind === "directory") deleteDirs.add(resolveAbsolutePath(rawPath));
+ else deleteFiles.add(resolveAbsolutePath(rawPath));
+ };
+
+ const addProtectRule = (rawPath: string, protectionMode: ProtectionMode, reason: string, source: string, matcher: ProtectionRuleMatcher = "path"): void => {
+ const resolvedPath = resolveAbsolutePath(rawPath);
+ protectedRules.set(`${matcher}:${protectionMode}:${resolvedPath}`, {
+ path: resolvedPath,
+ protectionMode,
+ reason,
+ source,
+ matcher,
+ });
+ };
+
+ const defaultProtectionModeForTarget = (target: OutputCleanupPathDeclaration): ProtectionMode => {
+ if (target.protectionMode != null) return target.protectionMode;
+ return target.kind === "file" ? "direct" : "recursive";
+ };
+
+ for (const rule of collectProtectedInputSourceRules(cleanCtx.collectedOutputContext))
+ addProtectRule(rule.path, rule.protectionMode, rule.reason, rule.source);
+ if (cleanCtx.collectedOutputContext.aindexDir != null && cleanCtx.pluginOptions != null) {
+ for (const rule of collectConfiguredAindexInputRules(cleanCtx.pluginOptions as Required, cleanCtx.collectedOutputContext.aindexDir, {
+ workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path,
+ })) {
+ addProtectRule(rule.path, rule.protectionMode, rule.reason, rule.source, rule.matcher);
+ }
+ }
+
+ for (const rule of cleanCtx.pluginOptions?.cleanupProtection?.rules ?? []) {
+ addProtectRule(
+ rule.path,
+ rule.protectionMode,
+ rule.reason ?? "configured cleanup protection rule",
+ "configured-cleanup-protection",
+ rule.matcher ?? "path",
+ );
+ }
+
+ for (const snapshot of pluginSnapshots) {
+ for (const declaration of snapshot.outputs) {
+ const resolvedOutputPath = resolveAbsolutePath(declaration.path);
+ addDeletePath(resolvedOutputPath, "file");
+ const existingOwners = outputPathOwners.get(resolvedOutputPath);
+ if (existingOwners == null) outputPathOwners.set(resolvedOutputPath, [snapshot.plugin.name]);
+ else if (!existingOwners.includes(snapshot.plugin.name)) existingOwners.push(snapshot.plugin.name);
+ }
+ for (const ignoreGlob of snapshot.cleanup.excludeScanGlobs ?? []) excludeScanGlobSet.add(normalizeGlobPattern(ignoreGlob));
+ }
+
+ const excludeScanGlobs = [...excludeScanGlobSet];
+
+ const resolveDeleteGlob = (target: OutputCleanupPathDeclaration): void => {
+ for (const matchedPath of expandCleanupGlob(target.path, excludeScanGlobs)) {
+ if (shouldExcludeCleanupMatch(matchedPath, target)) continue;
+
+ try {
+ const stat = fs.lstatSync(matchedPath);
+ if (stat.isDirectory()) addDeletePath(matchedPath, "directory");
+ else addDeletePath(matchedPath, "file");
+ } catch {}
+ }
+ };
+
+ const resolveProtectGlob = (target: OutputCleanupPathDeclaration, pluginName: string): void => {
+ const protectionMode = defaultProtectionModeForTarget(target);
+ const reason = target.label != null ? `plugin cleanup protect declaration (${target.label})` : "plugin cleanup protect declaration";
+
+ for (const matchedPath of expandCleanupGlob(target.path, excludeScanGlobs)) {
+ addProtectRule(matchedPath, protectionMode, reason, `plugin-cleanup-protect:${pluginName}`);
+ }
+ };
+
+ for (const { plugin, cleanup } of pluginSnapshots) {
+ for (const target of cleanup.protect ?? []) {
+ if (target.kind === "glob") {
+ resolveProtectGlob(target, plugin.name);
+ continue;
+ }
+ addProtectRule(
+ target.path,
+ defaultProtectionModeForTarget(target),
+ target.label != null ? `plugin cleanup protect declaration (${target.label})` : "plugin cleanup protect declaration",
+ `plugin-cleanup-protect:${plugin.name}`,
+ );
+ }
+
+ for (const target of cleanup.delete ?? []) {
+ if (target.kind === "glob") {
+ resolveDeleteGlob(target);
+ continue;
+ }
+ if (target.kind === "directory") addDeletePath(target.path, "directory");
+ else addDeletePath(target.path, "file");
+ }
+ }
+
+ const guard = createProtectedDeletionGuard({
+ workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path,
+ projectRoots: collectProjectRoots(cleanCtx.collectedOutputContext),
+ rules: [...protectedRules.values()],
+ ...(cleanCtx.collectedOutputContext.aindexDir != null ? { aindexDir: cleanCtx.collectedOutputContext.aindexDir } : {}),
+ });
+ const conflicts = detectCleanupProtectionConflicts(outputPathOwners, guard);
+ if (conflicts.length > 0) throw new CleanupProtectionConflictError(conflicts);
+ const filePartition = partitionDeletionTargets([...deleteFiles], guard);
+ const dirPartition = partitionDeletionTargets([...deleteDirs], guard);
+
+ const compactedTargets = compactDeletionTargets(filePartition.safePaths, dirPartition.safePaths);
+ const emptyDirectoryPlan = planWorkspaceEmptyDirectoryCleanup({
+ fs,
+ path,
+ workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path,
+ filesToDelete: compactedTargets.files,
+ dirsToDelete: compactedTargets.dirs,
+ });
+
+ return {
+ filesToDelete: compactedTargets.files,
+ dirsToDelete: compactedTargets.dirs,
+ emptyDirsToDelete: emptyDirectoryPlan.emptyDirsToDelete,
+ violations: [...filePartition.violations, ...dirPartition.violations].sort((a, b) => a.targetPath.localeCompare(b.targetPath)),
+ conflicts: [],
+ excludedScanGlobs: [...excludeScanGlobSet].sort((a, b) => a.localeCompare(b)),
+ };
+}
+
+export async function collectDeletionTargets(
+  outputPlugins: readonly OutputPlugin[],
+  cleanCtx: OutputCleanContext,
+  predeclaredOutputs?: ReadonlyMap<OutputPlugin, readonly OutputFileDeclaration[]>,
+): Promise<{
+ filesToDelete: string[];
+ dirsToDelete: string[];
+ emptyDirsToDelete: string[];
+ violations: import("../../src/ProtectedDeletionGuard").ProtectedPathViolation[];
+ conflicts: CleanupProtectionConflict[];
+ excludedScanGlobs: string[];
+}> {
+ const targets = await collectCleanupTargets(outputPlugins, cleanCtx, predeclaredOutputs);
+ return {
+ filesToDelete: targets.filesToDelete,
+ dirsToDelete: targets.dirsToDelete.sort((a, b) => a.localeCompare(b)),
+ emptyDirsToDelete: targets.emptyDirsToDelete.sort((a, b) => a.localeCompare(b)),
+ violations: [...targets.violations],
+ conflicts: [...targets.conflicts],
+ excludedScanGlobs: [...targets.excludedScanGlobs],
+ };
+}
+
+function buildCleanupErrors(logger: ILogger, errors: readonly DeletionError[], type: "file" | "directory"): CleanupError[] {
+ return errors.map((currentError) => {
+ const errorMessage = currentError.error instanceof Error ? currentError.error.message : String(currentError.error);
+ logger.warn(
+ buildFileOperationDiagnostic({
+ code: type === "file" ? "CLEANUP_FILE_DELETE_FAILED" : "CLEANUP_DIRECTORY_DELETE_FAILED",
+ title: type === "file" ? "Cleanup could not delete a file" : "Cleanup could not delete a directory",
+ operation: "delete",
+ targetKind: type,
+ path: currentError.path,
+ error: errorMessage,
+ details: {
+ phase: "cleanup",
+ },
+ }),
+ );
+
+ return { path: currentError.path, type, error: currentError.error };
+ });
+}
+
+async function executeCleanupTargets(
+ targets: CleanupTargetCollections,
+ logger: ILogger,
+): Promise<{ deletedFiles: number; deletedDirs: number; errors: CleanupError[] }> {
+ logger.debug("cleanup delete execution started", {
+ filesToDelete: targets.filesToDelete.length,
+ dirsToDelete: targets.dirsToDelete.length + targets.emptyDirsToDelete.length,
+ emptyDirsToDelete: targets.emptyDirsToDelete.length,
+ });
+
+ const result = await deskDeleteTargets({
+ files: targets.filesToDelete,
+ dirs: targets.dirsToDelete,
+ });
+ const emptyDirResult = await deleteEmptyDirectories(targets.emptyDirsToDelete);
+
+ const fileErrors = buildCleanupErrors(logger, result.fileErrors, "file");
+ const dirErrors = buildCleanupErrors(logger, [...result.dirErrors, ...emptyDirResult.errors], "directory");
+ const allErrors = [...fileErrors, ...dirErrors];
+
+ logger.debug("cleanup delete execution complete", {
+ deletedFiles: result.deletedFiles.length,
+ deletedDirs: result.deletedDirs.length + emptyDirResult.deletedPaths.length,
+ errors: allErrors.length,
+ });
+
+ return {
+ deletedFiles: result.deletedFiles.length,
+ deletedDirs: result.deletedDirs.length + emptyDirResult.deletedPaths.length,
+ errors: allErrors,
+ };
+}
+
+function logCleanupPlanDiagnostics(logger: ILogger, targets: CleanupTargetCollections): void {
+ logger.debug("cleanup plan built", {
+ filesToDelete: targets.filesToDelete.length,
+ dirsToDelete: targets.dirsToDelete.length + targets.emptyDirsToDelete.length,
+ emptyDirsToDelete: targets.emptyDirsToDelete.length,
+ violations: targets.violations.length,
+ conflicts: targets.conflicts.length,
+ excludedScanGlobs: targets.excludedScanGlobs,
+ });
+}
+
+/**
+ * Perform cleanup operation for output plugins.
+ * This is the main reusable cleanup function that can be called from both
+ * CleanCommand and ExecuteCommand (for pre-cleanup).
+ */
+export async function performCleanup(
+ outputPlugins: readonly OutputPlugin[],
+ cleanCtx: OutputCleanContext,
+ logger: ILogger,
+  predeclaredOutputs?: ReadonlyMap<OutputPlugin, readonly OutputFileDeclaration[]>,
+): Promise<CleanupResult> {
+ if (predeclaredOutputs != null) {
+ const outputs = await collectAllPluginOutputs(outputPlugins, cleanCtx, predeclaredOutputs);
+ logger.debug("Collected outputs for cleanup", {
+ projectDirs: outputs.projectDirs.length,
+ projectFiles: outputs.projectFiles.length,
+ globalDirs: outputs.globalDirs.length,
+ globalFiles: outputs.globalFiles.length,
+ });
+ }
+
+ let targets: CleanupTargetCollections;
+ try {
+ targets = await collectCleanupTargets(outputPlugins, cleanCtx, predeclaredOutputs);
+ } catch (error) {
+ if (error instanceof CleanupProtectionConflictError) {
+ logCleanupProtectionConflicts(logger, error.conflicts);
+ return {
+ deletedFiles: 0,
+ deletedDirs: 0,
+ errors: [],
+ violations: [],
+ conflicts: error.conflicts,
+ message: error.message,
+ };
+ }
+ throw error;
+ }
+ const cleanupTargets: CleanupTargetCollections = {
+ filesToDelete: targets.filesToDelete,
+ dirsToDelete: targets.dirsToDelete,
+ emptyDirsToDelete: targets.emptyDirsToDelete,
+ violations: targets.violations,
+ conflicts: targets.conflicts,
+ excludedScanGlobs: targets.excludedScanGlobs,
+ };
+ logCleanupPlanDiagnostics(logger, cleanupTargets);
+
+ if (cleanupTargets.violations.length > 0) {
+ logProtectedDeletionGuardError(logger, "cleanup", cleanupTargets.violations);
+ return {
+ deletedFiles: 0,
+ deletedDirs: 0,
+ errors: [],
+ violations: cleanupTargets.violations,
+ conflicts: [],
+ message: `Protected deletion guard blocked cleanup for ${cleanupTargets.violations.length} path(s)`,
+ };
+ }
+
+ const executionResult = await executeCleanupTargets(cleanupTargets, logger);
+
+ return {
+ deletedFiles: executionResult.deletedFiles,
+ deletedDirs: executionResult.deletedDirs,
+ errors: executionResult.errors,
+ violations: [],
+ conflicts: [],
+ };
+}
diff --git a/cli/src/core/desk-paths-fallback.ts b/cli/test/native-binding/desk-paths.ts
similarity index 88%
rename from cli/src/core/desk-paths-fallback.ts
rename to cli/test/native-binding/desk-paths.ts
index 8396cd7e..289cf5d6 100644
--- a/cli/src/core/desk-paths-fallback.ts
+++ b/cli/test/native-binding/desk-paths.ts
@@ -1,5 +1,5 @@
import type {Buffer} from 'node:buffer'
-import type {LoggerDiagnosticInput} from '../plugins/plugin-core'
+import type {LoggerDiagnosticInput} from '../../src/plugins/plugin-core'
import * as fs from 'node:fs'
import path from 'node:path'
import process from 'node:process'
@@ -105,6 +105,18 @@ async function deletePath(p: string): Promise {
}
}
+async function deleteEmptyDirectory(p: string): Promise<boolean> {
+ try {
+ await fs.promises.rmdir(p)
+ return true
+ }
+ catch (error) {
+ const {code} = error as NodeJS.ErrnoException
+ if (code === 'ENOENT' || code === 'ENOTEMPTY') return false
+ throw error
+ }
+}
+
async function mapWithConcurrencyLimit(
items: readonly T[],
concurrency: number,
@@ -180,6 +192,28 @@ export async function deleteDirectories(dirs: readonly string[]): Promise {
+export async function deleteEmptyDirectories(dirs: readonly string[]): Promise<{deleted: number, deletedPaths: string[], errors: DeletionError[]}> {
+  const sortedPaths = [...dirs].sort((a, b) => b.length - a.length || b.localeCompare(a))
+ const deletedPaths: string[] = []
+ const errors: DeletionError[] = []
+
+ for (const currentPath of sortedPaths) {
+ try {
+ const deleted = await deleteEmptyDirectory(currentPath)
+ if (deleted) deletedPaths.push(currentPath)
+ }
+ catch (error) {
+ errors.push({path: currentPath, error})
+ }
+ }
+
+ return {
+ deleted: deletedPaths.length,
+ deletedPaths,
+ errors
+ }
+}
+
export async function deleteTargets(targets: {
readonly files?: readonly string[]
readonly dirs?: readonly string[]
diff --git a/cli/test/setup-native-binding.ts b/cli/test/setup-native-binding.ts
new file mode 100644
index 00000000..da543ed2
--- /dev/null
+++ b/cli/test/setup-native-binding.ts
@@ -0,0 +1,215 @@
+import type { ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputPlugin } from "../src/plugins/plugin-core";
+import * as fs from "node:fs";
+import * as path from "node:path";
+import glob from "fast-glob";
+import * as deskPaths from "./native-binding/desk-paths";
+import { FilePathKind, PluginKind } from "../src/plugins/plugin-core/enums";
+
+interface NativeCleanupTarget {
+ readonly path: string;
+ readonly kind: "file" | "directory" | "glob";
+ readonly excludeBasenames?: readonly string[];
+ readonly protectionMode?: "direct" | "recursive";
+ readonly scope?: string;
+ readonly label?: string;
+}
+
+interface NativeCleanupDeclarations {
+ readonly delete?: readonly NativeCleanupTarget[];
+ readonly protect?: readonly NativeCleanupTarget[];
+ readonly excludeScanGlobs?: readonly string[];
+}
+
+interface NativePluginCleanupSnapshot {
+ readonly pluginName: string;
+ readonly outputs: readonly string[];
+ readonly cleanup: NativeCleanupDeclarations;
+}
+
+interface NativeProtectedRule {
+ readonly path: string;
+ readonly protectionMode: "direct" | "recursive";
+ readonly reason: string;
+ readonly source: string;
+ readonly matcher?: "path" | "glob";
+}
+
+interface NativeCleanupSnapshot {
+ readonly workspaceDir: string;
+ readonly aindexDir?: string;
+ readonly projectRoots: readonly string[];
+ readonly protectedRules: readonly NativeProtectedRule[];
+ readonly pluginSnapshots: readonly NativePluginCleanupSnapshot[];
+}
+
+function createMockLogger(): ILogger {
+ return {
+ trace: () => {},
+ debug: () => {},
+ info: () => {},
+ warn: () => {},
+ error: () => {},
+ fatal: () => {},
+ } as ILogger;
+}
+
+function createSyntheticOutputPlugin(snapshot: NativePluginCleanupSnapshot): OutputPlugin {
+ return {
+ type: PluginKind.Output,
+ name: snapshot.pluginName,
+ log: createMockLogger(),
+ declarativeOutput: true,
+ outputCapabilities: {},
+ async declareOutputFiles() {
+ return snapshot.outputs.map((output) => ({ path: output, source: {} }));
+ },
+    async declareCleanupPaths(): Promise<OutputCleanupDeclarations> {
+ return {
+ ...(snapshot.cleanup.delete != null ? { delete: [...snapshot.cleanup.delete] as OutputCleanupDeclarations["delete"] } : {}),
+ ...(snapshot.cleanup.protect != null ? { protect: [...snapshot.cleanup.protect] as OutputCleanupDeclarations["protect"] } : {}),
+ ...(snapshot.cleanup.excludeScanGlobs != null ? { excludeScanGlobs: [...snapshot.cleanup.excludeScanGlobs] } : {}),
+ };
+ },
+ async convertContent() {
+ return "";
+ },
+ };
+}
+
+async function createSyntheticCleanContext(snapshot: NativeCleanupSnapshot): Promise<OutputCleanContext> {
+ const { mergeConfig } = await import("../src/config");
+ const workspaceDir = path.resolve(snapshot.workspaceDir);
+ const cleanupProtectionRules = snapshot.protectedRules.map((rule) => ({
+ path: rule.path,
+ protectionMode: rule.protectionMode,
+ reason: rule.reason,
+ matcher: rule.matcher ?? "path",
+ }));
+
+ if (snapshot.aindexDir != null) {
+ cleanupProtectionRules.push({
+ path: snapshot.aindexDir,
+ protectionMode: "direct",
+ reason: "resolved aindex root",
+ matcher: "path",
+ });
+ }
+
+ return {
+ logger: createMockLogger(),
+ fs,
+ path,
+ glob,
+ dryRun: false,
+ pluginOptions: mergeConfig({
+ workspaceDir,
+ cleanupProtection: {
+ rules: cleanupProtectionRules,
+ },
+ }),
+ collectedOutputContext: {
+ workspace: {
+ directory: {
+ pathKind: FilePathKind.Absolute,
+ path: workspaceDir,
+ getDirectoryName: () => path.basename(workspaceDir),
+ getAbsolutePath: () => workspaceDir,
+ },
+ projects: snapshot.projectRoots.map((projectRoot) => ({
+ dirFromWorkspacePath: {
+ pathKind: FilePathKind.Relative,
+ path: path.relative(workspaceDir, projectRoot) || ".",
+ basePath: workspaceDir,
+ getDirectoryName: () => path.basename(projectRoot),
+ getAbsolutePath: () => projectRoot,
+ },
+ })),
+ },
+ },
+ } as unknown as OutputCleanContext;
+}
+
+async function planCleanup(snapshotJson: string): Promise<string> {
+ const { collectDeletionTargets } = await import("./native-binding/cleanup");
+ const snapshot = JSON.parse(snapshotJson) as NativeCleanupSnapshot;
+ const outputPlugins = snapshot.pluginSnapshots.map(createSyntheticOutputPlugin);
+ const cleanCtx = await createSyntheticCleanContext(snapshot);
+ const result = await collectDeletionTargets(outputPlugins, cleanCtx);
+
+ return JSON.stringify({
+ filesToDelete: result.filesToDelete,
+ dirsToDelete: result.dirsToDelete,
+ emptyDirsToDelete: result.emptyDirsToDelete,
+ violations: result.violations,
+ conflicts: result.conflicts,
+ excludedScanGlobs: result.excludedScanGlobs,
+ });
+}
+
+async function runCleanup(snapshotJson: string): Promise<string> {
+ const { performCleanup } = await import("./native-binding/cleanup");
+ const snapshot = JSON.parse(snapshotJson) as NativeCleanupSnapshot;
+ const outputPlugins = snapshot.pluginSnapshots.map(createSyntheticOutputPlugin);
+ const cleanCtx = await createSyntheticCleanContext(snapshot);
+ const result = await performCleanup(outputPlugins, cleanCtx, createMockLogger());
+
+ return JSON.stringify({
+ deletedFiles: result.deletedFiles,
+ deletedDirs: result.deletedDirs,
+ errors: result.errors.map((error) => ({
+ path: error.path,
+ kind: error.type,
+ error: error.error instanceof Error ? error.error.message : String(error.error),
+ })),
+ violations: result.violations,
+ conflicts: result.conflicts,
+ filesToDelete: [],
+ dirsToDelete: [],
+ emptyDirsToDelete: [],
+ excludedScanGlobs: [],
+ });
+}
+
+function resolveEffectiveIncludeSeries(topLevel?: readonly string[], typeSpecific?: readonly string[]): string[] {
+ if (topLevel == null && typeSpecific == null) return [];
+ return [...new Set([...(topLevel ?? []), ...(typeSpecific ?? [])])];
+}
+
+function matchesSeries(seriName: string | readonly string[] | null | undefined, effectiveIncludeSeries: readonly string[]): boolean {
+ if (seriName == null) return true;
+ if (effectiveIncludeSeries.length === 0) return true;
+ if (typeof seriName === "string") return effectiveIncludeSeries.includes(seriName);
+ return seriName.some((name) => effectiveIncludeSeries.includes(name));
+}
+
+function resolveSubSeries(
+  topLevel?: Readonly<Record<string, readonly string[]>>,
+  typeSpecific?: Readonly<Record<string, readonly string[]>>,
+): Record<string, string[]> {
+  if (topLevel == null && typeSpecific == null) return {};
+  const merged: Record<string, string[]> = {};
+ for (const [key, values] of Object.entries(topLevel ?? {})) merged[key] = [...values];
+ for (const [key, values] of Object.entries(typeSpecific ?? {})) {
+ const existingValues = merged[key] ?? [];
+ merged[key] = Object.hasOwn(merged, key) ? [...new Set([...existingValues, ...values])] : [...values];
+ }
+ return merged;
+}
+
+globalThis.__TNMSC_TEST_NATIVE_BINDING__ = {
+ getPlatformFixedDir: deskPaths.getPlatformFixedDir,
+ ensureDir: deskPaths.ensureDir,
+ existsSync: deskPaths.existsSync,
+ deletePathSync: deskPaths.deletePathSync,
+ writeFileSync: deskPaths.writeFileSync,
+ readFileSync: deskPaths.readFileSync,
+ deleteFiles: deskPaths.deleteFiles,
+ deleteDirectories: deskPaths.deleteDirectories,
+ deleteEmptyDirectories: deskPaths.deleteEmptyDirectories,
+ deleteTargets: deskPaths.deleteTargets,
+ planCleanup,
+ performCleanup: runCleanup,
+ resolveEffectiveIncludeSeries,
+ matchesSeries,
+ resolveSubSeries,
+};
diff --git a/cli/tsconfig.test.json b/cli/tsconfig.test.json
index 65c3c9ad..094bf0e6 100644
--- a/cli/tsconfig.test.json
+++ b/cli/tsconfig.test.json
@@ -14,6 +14,7 @@
"include": [
"src/**/*.spec.ts",
"src/**/*.test.ts",
+ "test/**/*.ts",
"vitest.config.ts",
"vite.config.ts",
"env.d.ts"
diff --git a/cli/vitest.config.ts b/cli/vitest.config.ts
index 57ea93fe..c80ffd11 100644
--- a/cli/vitest.config.ts
+++ b/cli/vitest.config.ts
@@ -12,6 +12,7 @@ export default mergeConfig(
passWithNoTests: true,
exclude: [...configDefaults.exclude, 'e2e/*'],
root: fileURLToPath(new URL('./', import.meta.url)),
+ setupFiles: ['./test/setup-native-binding.ts'],
typecheck: {
enabled: true,
tsconfig: './tsconfig.test.json'
diff --git a/doc/app/home-page.mdx b/doc/app/home-page.mdx
index e58babf1..45f80343 100644
--- a/doc/app/home-page.mdx
+++ b/doc/app/home-page.mdx
@@ -30,7 +30,7 @@ import {
{siteConfig.productName}
{' '}
以 MDX 维护源内容,通过 Rust-first / NAPI-first pipeline 将 prompts、rules、skills、commands
- 与 workspace memory 物化为目标工具原生配置。
+ 与 project memory 物化为目标工具原生配置。
diff --git a/doc/content/cli/_meta.ts b/doc/content/cli/_meta.ts
index b9ddf6f9..bc38a43f 100644
--- a/doc/content/cli/_meta.ts
+++ b/doc/content/cli/_meta.ts
@@ -1,7 +1,7 @@
export default {
'index': '概览',
'install': '安装与前提',
- 'workspace-setup': '工作区与 aindex',
+ 'workspace-setup': '项目与 aindex',
'first-sync': '第一次同步',
'migration': '从旧文档迁移',
'cli-commands': 'CLI 命令',
diff --git a/doc/content/cli/cli-commands.mdx b/doc/content/cli/cli-commands.mdx
index b8a4df2c..3731fb94 100644
--- a/doc/content/cli/cli-commands.mdx
+++ b/doc/content/cli/cli-commands.mdx
@@ -16,8 +16,8 @@ status: stable
| `tnmsc version` | 查看版本 |
| `tnmsc init` | 已废弃,不再初始化 aindex |
| `tnmsc dry-run` | 预览会写出的文件 |
-| `tnmsc clean` | 删除已生成的输出文件 |
-| `tnmsc clean --dry-run` | 预览会清理什么 |
+| `tnmsc clean` | 删除已生成的输出文件,并继续清理项目源码树中的空目录 |
+| `tnmsc clean --dry-run` | 预览会清理什么,包括后续会被回收的空目录 |
| `tnmsc config key=value` | 修改全局配置 |
## 关键结论
diff --git a/doc/content/cli/dry-run-and-clean.mdx b/doc/content/cli/dry-run-and-clean.mdx
index 499a3b5f..66e4ddb6 100644
--- a/doc/content/cli/dry-run-and-clean.mdx
+++ b/doc/content/cli/dry-run-and-clean.mdx
@@ -20,12 +20,16 @@ status: stable
`tnmsc clean` 用来清除已生成的输出文件。它不是“无差别删目录”,而是依据当前输出模型和清理保护规则执行。
+在完成常规 cleanup 之后,`tnmsc clean` 还会继续扫描当前项目的源码树,把剩余的空目录一并清掉。这个 empty-dir sweep 会明确跳过 Git 内部目录,以及依赖、构建产物、缓存一类目录树。
+
在真正执行前,优先用:
```sh
tnmsc clean --dry-run
```
+`--dry-run` 也会把这些后续会被清掉的空目录一起展示出来。
+
## 风险边界
如果输出目录里混有人工文件或其他工具产物,必须先看 [清理保护](/docs/cli/cleanup-protection)。没有保护规则时,`clean` 的风险会显著上升。
diff --git a/doc/content/cli/index.mdx b/doc/content/cli/index.mdx
index 7f21022f..37d05c97 100644
--- a/doc/content/cli/index.mdx
+++ b/doc/content/cli/index.mdx
@@ -1,18 +1,18 @@
---
title: CLI
-description: 以 tnmsc 命令面为中心组织安装、工作区准备、同步流程、配置字段与排障信息。
+description: 以 tnmsc 命令面为中心组织安装、项目准备、同步流程、配置字段与排障信息。
sidebarTitle: 概览
status: stable
---
# CLI
-这一节围绕 `tnmsc` 这个公开命令面组织。凡是“怎么安装”“怎么准备工作区”“怎么执行同步”“某个配置字段到底是什么意思”这类问题,都优先从这里进入。
+这一节围绕 `tnmsc` 这个公开命令面组织。凡是“怎么安装”“怎么准备项目”“怎么执行同步”“某个配置字段到底是什么意思”这类问题,都优先从这里进入。
## 这一节包含什么
- [安装与前提](/docs/cli/install):确认 Node、pnpm、Rust 与 GUI 的更高开发引擎边界。
-- [工作区与 aindex](/docs/cli/workspace-setup):准备源文件目录与项目配置入口。
+- [项目与 aindex](/docs/cli/workspace-setup):准备源文件目录与项目配置入口。
- [第一次同步](/docs/cli/first-sync):按推荐顺序跑 `help`、`dry-run` 与真实写入。
- [CLI 命令](/docs/cli/cli-commands):核对 `tnmsc --help` 当前公开的命令表面。
- [dry-run 与 clean](/docs/cli/dry-run-and-clean):先预览、再写入、再清理。
@@ -23,6 +23,6 @@ status: stable
## 推荐顺序
1. 先读 [安装与前提](/docs/cli/install)。
-2. 再读 [工作区与 aindex](/docs/cli/workspace-setup)。
+2. 再读 [项目与 aindex](/docs/cli/workspace-setup)。
3. 接着用 [第一次同步](/docs/cli/first-sync) 跑通一次真实流程。
4. 需要核对事实时,再回看 [CLI 命令](/docs/cli/cli-commands) 与 [JSON Schema](/docs/cli/schema)。
diff --git a/doc/content/cli/schema.mdx b/doc/content/cli/schema.mdx
index 45e3f455..92e21742 100644
--- a/doc/content/cli/schema.mdx
+++ b/doc/content/cli/schema.mdx
@@ -12,7 +12,7 @@ status: stable
| 字段 | 类型 | 说明 |
| --- | --- | --- |
| `version` | `string` | 配置版本或发布版本标记 |
-| `workspaceDir` | `string` | 工作区根目录 |
+| `workspaceDir` | `string` | 项目根目录 |
| `aindex` | `object` | 输入源与导出目录映射 |
| `logLevel` | enum | `trace` / `debug` / `info` / `warn` / `error` |
| `commandSeriesOptions` | `object` | command 系列命名与插件覆盖 |
diff --git a/doc/content/cli/troubleshooting.mdx b/doc/content/cli/troubleshooting.mdx
index 268f12c5..480e94c1 100644
--- a/doc/content/cli/troubleshooting.mdx
+++ b/doc/content/cli/troubleshooting.mdx
@@ -21,7 +21,7 @@ status: stable
1. [plugin.config.ts](/docs/cli/plugin-config) 中到底装了哪些输出插件。
2. [输出范围](/docs/cli/output-scopes) 有没有按 topic 限制范围。
-3. 你写的是 global 还是 workspace 源。
+3. 你写的是 global Prompt 源还是项目 Prompt 源。
## 现象:`clean` 不敢执行或执行后删过头
diff --git a/doc/content/cli/workspace-setup.mdx b/doc/content/cli/workspace-setup.mdx
index b598858d..2ae5fe61 100644
--- a/doc/content/cli/workspace-setup.mdx
+++ b/doc/content/cli/workspace-setup.mdx
@@ -1,11 +1,11 @@
---
-title: 工作区与 aindex
+title: 项目与 aindex
description: 说明 aindex 源目录、全局配置与项目配置入口在当前仓库中的职责分工。
-sidebarTitle: 工作区与 aindex
+sidebarTitle: 项目与 aindex
status: stable
---
-# 工作区与 aindex
+# 项目与 aindex
## 先记住这件事
@@ -13,7 +13,7 @@ status: stable
## 你至少需要准备什么
-- 一个工作区根目录
+- 一个项目根目录
- aindex 风格的源内容目录
- 全局配置文件
- 项目级 `plugin.config.ts`
@@ -22,7 +22,7 @@ status: stable
当前文档里单独拆出的输入资产包括:
-- 全局 Prompt 与工作区 Prompt
+- 全局 Prompt 与项目 Prompt
- `skills/`
- `commands/`
- `subagents/`
diff --git a/doc/content/index.mdx b/doc/content/index.mdx
index af3da22b..37e8564f 100644
--- a/doc/content/index.mdx
+++ b/doc/content/index.mdx
@@ -17,7 +17,7 @@ keywords:
| 门类 | 主要回答的问题 | 入口 |
| --- | --- | --- |
-| CLI | 如何安装、准备工作区、执行同步、理解命令与配置字段 | [CLI](/docs/cli) |
+| CLI | 如何安装、准备项目、执行同步、理解命令与配置字段 | [CLI](/docs/cli) |
| MCP | `memory-sync-mcp` 是什么、暴露了哪些工具、适合怎么接入 | [MCP](/docs/mcp) |
| GUI | 桌面层负责什么、有哪些页面、它与 `tnmsc` crate / CLI 如何配合 | [GUI](/docs/gui) |
| 技术细节 | 架构边界、同步管线、真源模型,以及 prompts / skills / commands / rules 等输入资产如何组织 | [技术细节](/docs/technical-details) |
@@ -25,7 +25,7 @@ keywords:
## 从哪里开始
-- 第一次使用 `memory-sync`,从 [CLI](/docs/cli) 开始,先把安装、工作区准备和第一次同步跑通。
+- 第一次使用 `memory-sync`,从 [CLI](/docs/cli) 开始,先把安装、项目准备和第一次同步跑通。
- 需要把 `memory-sync-mcp` 接入到支持 MCP 的宿主里,直接进 [MCP](/docs/mcp)。
- 关注桌面应用而不是终端入口时,查看 [GUI](/docs/gui)。
- 需要理解 Rust-first / NAPI-first、真源模型和输入资产职责时,进入 [技术细节](/docs/technical-details)。
diff --git a/doc/content/mcp/index.mdx b/doc/content/mcp/index.mdx
index 57678e13..38df0ecc 100644
--- a/doc/content/mcp/index.mdx
+++ b/doc/content/mcp/index.mdx
@@ -24,5 +24,5 @@ status: stable
## 推荐阅读
- [Server 与 Tools](/docs/mcp/server-tools):查看 stdio server 的入口、工具清单与 `workspaceDir` 语义。
-- [CLI](/docs/cli):需要了解工作区准备、Schema 与输出边界时,仍然回到 CLI 侧核对事实。
+- [CLI](/docs/cli):需要了解项目准备、Schema 与输出边界时,仍然回到 CLI 侧核对事实。
- [技术细节](/docs/technical-details/source-of-truth):需要理解 prompt 资产为什么这样建模时,从真源模型开始。
diff --git a/doc/content/mcp/server-tools.mdx b/doc/content/mcp/server-tools.mdx
index ef5a0f2d..700d17b1 100644
--- a/doc/content/mcp/server-tools.mdx
+++ b/doc/content/mcp/server-tools.mdx
@@ -58,12 +58,12 @@ status: stable
## `workspaceDir` 的作用
-这些工具都接受可选的 `workspaceDir`。它的语义不是“随便切目录”,而是把 prompt service 的工作区根显式绑定到某个项目上下文。
+这些工具都接受可选的 `workspaceDir`。它的语义不是“随便切目录”,而是把 prompt service 的项目根显式绑定到某个项目上下文。
如果不传,server 会按默认当前工作目录解释;如果传入,则会同时进入 `cwd` 与 `pluginOptions.workspaceDir`。
## 使用边界
-- 先保证 CLI 侧的工作区结构是正确的,再接入 MCP
+- 先保证 CLI 侧的项目结构是正确的,再接入 MCP
- MCP 只暴露当前已有的 prompt 管理行为,不替你发明新的 Schema
- 需要确认 prompt 类型与真源职责时,回看 [技术细节](/docs/technical-details)
diff --git a/doc/content/technical-details/_meta.ts b/doc/content/technical-details/_meta.ts
index 38b7a67f..9e91cca0 100644
--- a/doc/content/technical-details/_meta.ts
+++ b/doc/content/technical-details/_meta.ts
@@ -4,7 +4,7 @@ export default {
'pipeline': '同步管线',
'source-of-truth': '单一真源模型',
'documentation-components': '文档组件',
- 'global-and-workspace-prompts': '全局与工作区 Prompt',
+ 'global-and-workspace-prompts': '全局与项目 Prompt',
'skills': 'Skills',
'commands': 'Commands',
'subagents': 'Sub-agents',
diff --git a/doc/content/technical-details/global-and-workspace-prompts.mdx b/doc/content/technical-details/global-and-workspace-prompts.mdx
index d98bacfb..f32536b2 100644
--- a/doc/content/technical-details/global-and-workspace-prompts.mdx
+++ b/doc/content/technical-details/global-and-workspace-prompts.mdx
@@ -1,11 +1,11 @@
---
-title: 全局与工作区 Prompt
+title: 全局与项目 Prompt
description: 说明 globalPrompt 与 workspacePrompt 的路径职责和适用范围。
-sidebarTitle: 全局与工作区 Prompt
+sidebarTitle: 全局与项目 Prompt
status: stable
---
-# 全局与工作区 Prompt
+# 全局与项目 Prompt
## 默认路径
@@ -41,6 +41,6 @@ dist/workspace.mdx
## 编写原则
-1. 能放 workspace 的,不要抢着放 global。
+1. 能放项目 Prompt 的,不要抢着放 global。
2. 不要把目标工具特有路径和格式硬编码进真源。
3. 如果某段内容本质上更像可复用卡片,应优先考虑抽成 skill 或 rule,而不是继续把 Prompt 膨胀成一整面墙。
diff --git a/doc/content/technical-details/index.mdx b/doc/content/technical-details/index.mdx
index ac8978c3..f381bf8c 100644
--- a/doc/content/technical-details/index.mdx
+++ b/doc/content/technical-details/index.mdx
@@ -21,7 +21,7 @@ status: stable
- [架构边界](/docs/technical-details/architecture)
- [单一真源模型](/docs/technical-details/source-of-truth)
- [同步管线](/docs/technical-details/pipeline)
-- [全局与工作区 Prompt](/docs/technical-details/global-and-workspace-prompts)
+- [全局与项目 Prompt](/docs/technical-details/global-and-workspace-prompts)
- [Skills](/docs/technical-details/skills)
- [Commands](/docs/technical-details/commands)
- [Sub-agents](/docs/technical-details/subagents)
diff --git a/doc/content/technical-details/pipeline.mdx b/doc/content/technical-details/pipeline.mdx
index 8bb2cb8d..c90dcb46 100644
--- a/doc/content/technical-details/pipeline.mdx
+++ b/doc/content/technical-details/pipeline.mdx
@@ -12,7 +12,7 @@ status: stable
## 管线最少包含什么
- 读取真源输入资产
-- 合并工作区与全局配置
+- 合并项目与全局配置
- 按插件决定输出目标
- 应用 `outputScopes` 约束
- 写入目标文件
diff --git a/doc/content/technical-details/source-of-truth.mdx b/doc/content/technical-details/source-of-truth.mdx
index 887ed24b..088cfcb9 100644
--- a/doc/content/technical-details/source-of-truth.mdx
+++ b/doc/content/technical-details/source-of-truth.mdx
@@ -12,7 +12,7 @@ status: stable
## 哪些通常是真源
- 全局 Prompt
-- 工作区 Prompt
+- 项目 Prompt
- `skills/`
- `commands/`
- `subagents/`
diff --git a/doc/lib/site.ts b/doc/lib/site.ts
index c9cb03dd..85c78e6d 100644
--- a/doc/lib/site.ts
+++ b/doc/lib/site.ts
@@ -5,7 +5,7 @@ export const siteConfig = {
shortName: 'memory-sync Docs',
title: 'memory-sync 文档',
description:
- '面向多 AI 工具的 prompt、rule、skill、command 与 workspace memory sync 文档站。',
+ '面向多 AI 工具的 prompt、rule、skill、command 与 project memory sync 文档站。',
repoUrl: 'https://github.com/TrueNine/memory-sync',
docsRepositoryBase: 'https://github.com/TrueNine/memory-sync/blob/main/doc',
issueUrl: 'https://github.com/TrueNine/memory-sync/issues/new/choose'
@@ -34,7 +34,7 @@ export const homeEntryCards = [
{
href: '/docs/cli',
title: 'CLI',
- detail: '围绕安装、工作区准备、第一次同步、配置字段与命令表面组织。'
+ detail: '围绕安装、项目准备、第一次同步、配置字段与命令表面组织。'
},
{
href: '/docs/mcp',
@@ -92,14 +92,14 @@ export const readingPath = [
{
step: '02',
href: '/docs/cli/workspace-setup',
- title: '准备工作区结构',
+ title: '准备项目结构',
description: '按照 aindex 与项目配置的真实目录约定组织源文件。'
},
{
step: '03',
href: '/docs/technical-details/source-of-truth',
title: '开始维护源内容',
- description: '先建立真源模型,再区分全局 Prompt、工作区 Prompt 与其他输入资产。'
+ description: '先建立真源模型,再区分全局 Prompt、项目 Prompt 与其他输入资产。'
},
{
step: '04',
From 669f4090463f553992ceded2616e5e675fe32d4c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?=
Date: Mon, 30 Mar 2026 08:37:14 +0800
Subject: [PATCH 3/3] Bump package versions to 2026.10330.108
---
Cargo.toml | 2 +-
cli/npm/darwin-arm64/package.json | 2 +-
cli/npm/darwin-x64/package.json | 2 +-
cli/npm/linux-arm64-gnu/package.json | 2 +-
cli/npm/linux-x64-gnu/package.json | 2 +-
cli/npm/win32-x64-msvc/package.json | 2 +-
cli/package.json | 2 +-
doc/package.json | 2 +-
gui/package.json | 2 +-
gui/src-tauri/Cargo.toml | 2 +-
gui/src-tauri/tauri.conf.json | 2 +-
libraries/logger/package.json | 2 +-
libraries/md-compiler/package.json | 2 +-
libraries/script-runtime/package.json | 2 +-
mcp/package.json | 2 +-
package.json | 2 +-
16 files changed, 16 insertions(+), 16 deletions(-)
diff --git a/Cargo.toml b/Cargo.toml
index f44a85d3..ab153449 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -9,7 +9,7 @@ members = [
]
[workspace.package]
-version = "2026.10329.110"
+version = "2026.10330.108"
edition = "2024"
license = "AGPL-3.0-only"
authors = ["TrueNine"]
diff --git a/cli/npm/darwin-arm64/package.json b/cli/npm/darwin-arm64/package.json
index c6be1977..536295d6 100644
--- a/cli/npm/darwin-arm64/package.json
+++ b/cli/npm/darwin-arm64/package.json
@@ -1,6 +1,6 @@
{
"name": "@truenine/memory-sync-cli-darwin-arm64",
- "version": "2026.10329.110",
+ "version": "2026.10330.108",
"os": [
"darwin"
],
diff --git a/cli/npm/darwin-x64/package.json b/cli/npm/darwin-x64/package.json
index 96f2661f..a35474d3 100644
--- a/cli/npm/darwin-x64/package.json
+++ b/cli/npm/darwin-x64/package.json
@@ -1,6 +1,6 @@
{
"name": "@truenine/memory-sync-cli-darwin-x64",
- "version": "2026.10329.110",
+ "version": "2026.10330.108",
"os": [
"darwin"
],
diff --git a/cli/npm/linux-arm64-gnu/package.json b/cli/npm/linux-arm64-gnu/package.json
index 1a16a292..e38b50cf 100644
--- a/cli/npm/linux-arm64-gnu/package.json
+++ b/cli/npm/linux-arm64-gnu/package.json
@@ -1,6 +1,6 @@
{
"name": "@truenine/memory-sync-cli-linux-arm64-gnu",
- "version": "2026.10329.110",
+ "version": "2026.10330.108",
"os": [
"linux"
],
diff --git a/cli/npm/linux-x64-gnu/package.json b/cli/npm/linux-x64-gnu/package.json
index faf1796d..60b108c1 100644
--- a/cli/npm/linux-x64-gnu/package.json
+++ b/cli/npm/linux-x64-gnu/package.json
@@ -1,6 +1,6 @@
{
"name": "@truenine/memory-sync-cli-linux-x64-gnu",
- "version": "2026.10329.110",
+ "version": "2026.10330.108",
"os": [
"linux"
],
diff --git a/cli/npm/win32-x64-msvc/package.json b/cli/npm/win32-x64-msvc/package.json
index 1799e803..6cd15973 100644
--- a/cli/npm/win32-x64-msvc/package.json
+++ b/cli/npm/win32-x64-msvc/package.json
@@ -1,6 +1,6 @@
{
"name": "@truenine/memory-sync-cli-win32-x64-msvc",
- "version": "2026.10329.110",
+ "version": "2026.10330.108",
"os": [
"win32"
],
diff --git a/cli/package.json b/cli/package.json
index 8223fa97..8cd72ad5 100644
--- a/cli/package.json
+++ b/cli/package.json
@@ -1,7 +1,7 @@
{
"name": "@truenine/memory-sync-cli",
"type": "module",
- "version": "2026.10329.110",
+ "version": "2026.10330.108",
"description": "TrueNine Memory Synchronization CLI",
"author": "TrueNine",
"license": "AGPL-3.0-only",
diff --git a/doc/package.json b/doc/package.json
index 392dba5e..52ce54c3 100644
--- a/doc/package.json
+++ b/doc/package.json
@@ -1,6 +1,6 @@
{
"name": "@truenine/memory-sync-docs",
- "version": "2026.10329.110",
+ "version": "2026.10330.108",
"private": true,
"description": "Chinese-first manifesto-led documentation site for @truenine/memory-sync.",
"engines": {
diff --git a/gui/package.json b/gui/package.json
index 7a454edc..853f6d13 100644
--- a/gui/package.json
+++ b/gui/package.json
@@ -1,6 +1,6 @@
{
"name": "@truenine/memory-sync-gui",
- "version": "2026.10329.110",
+ "version": "2026.10330.108",
"private": true,
"engines": {
"node": ">=25.2.1",
diff --git a/gui/src-tauri/Cargo.toml b/gui/src-tauri/Cargo.toml
index 73f767de..47750281 100644
--- a/gui/src-tauri/Cargo.toml
+++ b/gui/src-tauri/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "memory-sync-gui"
-version = "2026.10329.110"
+version = "2026.10330.108"
description = "Memory Sync desktop GUI application"
authors.workspace = true
edition.workspace = true
diff --git a/gui/src-tauri/tauri.conf.json b/gui/src-tauri/tauri.conf.json
index af5ab62e..d4a37ce9 100644
--- a/gui/src-tauri/tauri.conf.json
+++ b/gui/src-tauri/tauri.conf.json
@@ -1,6 +1,6 @@
{
"$schema": "https://schema.tauri.app/config/2",
- "version": "2026.10329.110",
+ "version": "2026.10330.108",
"productName": "Memory Sync",
"identifier": "org.truenine.memory-sync",
"build": {
diff --git a/libraries/logger/package.json b/libraries/logger/package.json
index f58407bd..6bcdc6a4 100644
--- a/libraries/logger/package.json
+++ b/libraries/logger/package.json
@@ -1,7 +1,7 @@
{
"name": "@truenine/logger",
"type": "module",
- "version": "2026.10329.110",
+ "version": "2026.10330.108",
"private": true,
"description": "Rust-powered structured logger for Node.js via N-API",
"license": "AGPL-3.0-only",
diff --git a/libraries/md-compiler/package.json b/libraries/md-compiler/package.json
index 1cc34c46..29a1583a 100644
--- a/libraries/md-compiler/package.json
+++ b/libraries/md-compiler/package.json
@@ -1,7 +1,7 @@
{
"name": "@truenine/md-compiler",
"type": "module",
- "version": "2026.10329.110",
+ "version": "2026.10330.108",
"private": true,
"description": "Rust-powered MDX→Markdown compiler for Node.js with pure-TS fallback",
"license": "AGPL-3.0-only",
diff --git a/libraries/script-runtime/package.json b/libraries/script-runtime/package.json
index ce82c38c..cc8fe1d4 100644
--- a/libraries/script-runtime/package.json
+++ b/libraries/script-runtime/package.json
@@ -1,7 +1,7 @@
{
"name": "@truenine/script-runtime",
"type": "module",
- "version": "2026.10329.110",
+ "version": "2026.10330.108",
"private": true,
"description": "Rust-backed TypeScript proxy runtime for tnmsc",
"license": "AGPL-3.0-only",
diff --git a/mcp/package.json b/mcp/package.json
index a3331948..b820138f 100644
--- a/mcp/package.json
+++ b/mcp/package.json
@@ -1,7 +1,7 @@
{
"name": "@truenine/memory-sync-mcp",
"type": "module",
- "version": "2026.10329.110",
+ "version": "2026.10330.108",
"description": "MCP stdio server for managing memory-sync prompt sources and translation artifacts",
"author": "TrueNine",
"license": "AGPL-3.0-only",
diff --git a/package.json b/package.json
index 8607f518..36c5c462 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "@truenine/memory-sync",
- "version": "2026.10329.110",
+ "version": "2026.10330.108",
"description": "Cross-AI-tool prompt synchronisation toolkit (CLI + Tauri desktop GUI) — one ruleset, multi-target adaptation. Monorepo powered by pnpm + Turbo.",
"license": "AGPL-3.0-only",
"keywords": [