diff --git a/Cargo.lock b/Cargo.lock index 5ae09b55..966cae74 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -244,6 +244,16 @@ dependencies = [ "alloc-stdlib", ] +[[package]] +name = "bstr" +version = "1.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63044e1ae8e69f3b5a92c736ca6269b8d12fa7efe39bf34ddb06d102cf0e2cab" +dependencies = [ + "memchr", + "serde", +] + [[package]] name = "bumpalo" version = "3.20.2" @@ -1342,6 +1352,19 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" +[[package]] +name = "globset" +version = "0.4.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52dfc19153a48bde0cbd630453615c8151bce3a5adfac7a0aebfbf0a1e1f57e3" +dependencies = [ + "aho-corasick", + "bstr", + "log", + "regex-automata", + "regex-syntax", +] + [[package]] name = "gobject-sys" version = "0.18.0" @@ -4353,6 +4376,7 @@ version = "2026.10327.10010" dependencies = [ "clap", "dirs", + "globset", "napi", "napi-build", "napi-derive", @@ -4365,6 +4389,7 @@ dependencies = [ "thiserror 2.0.18", "tnmsc-logger", "tnmsc-md-compiler", + "walkdir", ] [[package]] diff --git a/cli/Cargo.toml b/cli/Cargo.toml index c50993d8..7e6ebbaf 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -33,6 +33,8 @@ sha2 = { workspace = true } napi = { workspace = true, optional = true } napi-derive = { workspace = true, optional = true } reqwest = { version = "0.13.2", default-features = false, features = ["blocking", "json", "rustls"] } +globset = "0.4.16" +walkdir = "2.5.0" [dev-dependencies] proptest = "1.10.0" diff --git a/cli/package.json b/cli/package.json index abbe930d..298e5049 100644 --- a/cli/package.json +++ b/cli/package.json @@ -58,7 +58,9 @@ "lint": "eslint --cache .", "prepublishOnly": "run-s build", "test": "run-s build:deps test:run", + "test:native-cleanup-smoke": "tsx scripts/cleanup-native-smoke.ts", "test:run": 
"vitest run", + "benchmark:cleanup": "tsx scripts/benchmark-cleanup.ts", "lintfix": "eslint --fix --cache .", "typecheck": "tsc --noEmit -p tsconfig.lib.json" }, diff --git a/cli/scripts/benchmark-cleanup.ts b/cli/scripts/benchmark-cleanup.ts new file mode 100644 index 00000000..7037235b --- /dev/null +++ b/cli/scripts/benchmark-cleanup.ts @@ -0,0 +1,154 @@ +import type {ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputPlugin} from '../src/plugins/plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import {performance} from 'node:perf_hooks' +import glob from 'fast-glob' + +process.env['TNMSC_FORCE_NATIVE_BINDING'] = '1' +delete process.env['VITEST'] +delete process.env['VITEST_WORKER_ID'] + +const cleanupModule = await import('../src/commands/CleanupUtils') +const fallbackModule = await import('../src/commands/CleanupUtils.fallback') +const pluginCore = await import('../src/plugins/plugin-core') + +function createMockLogger(): ILogger { + return { + trace: () => {}, + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {}, + fatal: () => {} + } as ILogger +} + +function createCleanContext(workspaceDir: string): OutputCleanContext { + return { + logger: createMockLogger(), + fs, + path, + glob, + collectedOutputContext: { + workspace: { + directory: { + pathKind: pluginCore.FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: Array.from({length: 40}, (_, index) => ({ + dirFromWorkspacePath: { + pathKind: pluginCore.FilePathKind.Relative, + path: `project-${index}`, + basePath: workspaceDir, + getDirectoryName: () => `project-${index}`, + getAbsolutePath: () => path.join(workspaceDir, `project-${index}`) + } + })) + }, + aindexDir: path.join(workspaceDir, 'aindex') + } + } as OutputCleanContext +} + +function createBenchmarkPlugin(workspaceDir: string): OutputPlugin { + return { 
+ type: pluginCore.PluginKind.Output, + name: 'BenchmarkOutputPlugin', + log: createMockLogger(), + declarativeOutput: true, + outputCapabilities: {}, + async declareOutputFiles() { + return Array.from({length: 40}, (_, projectIndex) => ([ + {path: path.join(workspaceDir, `project-${projectIndex}`, 'AGENTS.md'), source: {}}, + {path: path.join(workspaceDir, `project-${projectIndex}`, 'commands', 'AGENTS.md'), source: {}} + ])).flat() + }, + async declareCleanupPaths(): Promise { + return { + delete: [{ + kind: 'glob', + path: path.join(workspaceDir, '.codex', 'skills', '*'), + excludeBasenames: ['.system'] + }, { + kind: 'glob', + path: path.join(workspaceDir, '.claude', '**', 'CLAUDE.md') + }], + protect: [{ + kind: 'directory', + path: path.join(workspaceDir, '.codex', 'skills', '.system'), + protectionMode: 'recursive' + }] + } + }, + async convertContent() { + return 'benchmark' + } + } +} + +async function measure(label: string, iterations: number, run: () => Promise): Promise { + const start = performance.now() + for (let index = 0; index < iterations; index += 1) { + await run() + } + const total = performance.now() - start + const average = total / iterations + process.stdout.write(`${label}: total=${total.toFixed(2)}ms avg=${average.toFixed(2)}ms\n`) + return average +} + +async function main(): Promise { + if (!cleanupModule.hasNativeCleanupBinding()) { + throw new Error('Native cleanup binding is unavailable. 
Build the CLI NAPI module first.') + } + + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-benchmark-cleanup-')) + const workspaceDir = path.join(tempDir, 'workspace') + + try { + for (let projectIndex = 0; projectIndex < 40; projectIndex += 1) { + const rootFile = path.join(workspaceDir, `project-${projectIndex}`, 'AGENTS.md') + const childFile = path.join(workspaceDir, `project-${projectIndex}`, 'commands', 'AGENTS.md') + fs.mkdirSync(path.dirname(childFile), {recursive: true}) + fs.writeFileSync(rootFile, '# root', 'utf8') + fs.writeFileSync(childFile, '# child', 'utf8') + } + + const skillsDir = path.join(workspaceDir, '.codex', 'skills') + fs.mkdirSync(path.join(skillsDir, '.system'), {recursive: true}) + for (let index = 0; index < 80; index += 1) { + const skillDir = path.join(skillsDir, `legacy-${index}`) + fs.mkdirSync(skillDir, {recursive: true}) + fs.writeFileSync(path.join(skillDir, 'SKILL.md'), '# stale', 'utf8') + } + + for (let index = 0; index < 40; index += 1) { + const claudeFile = path.join(workspaceDir, '.claude', `project-${index}`, 'CLAUDE.md') + fs.mkdirSync(path.dirname(claudeFile), {recursive: true}) + fs.writeFileSync(claudeFile, '# claude', 'utf8') + } + + const plugin = createBenchmarkPlugin(workspaceDir) + const cleanCtx = createCleanContext(workspaceDir) + const iterations = 25 + + process.stdout.write(`cleanup benchmark iterations=${iterations}\n`) + const fallbackAvg = await measure('fallback-plan', iterations, async () => { + await fallbackModule.collectDeletionTargets([plugin], cleanCtx) + }) + const nativeAvg = await measure('native-plan', iterations, async () => { + await cleanupModule.collectDeletionTargets([plugin], cleanCtx) + }) + + const delta = nativeAvg - fallbackAvg + process.stdout.write(`delta=${delta.toFixed(2)}ms (${((delta / fallbackAvg) * 100).toFixed(2)}%)\n`) + } + finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } +} + +await main() diff --git a/cli/scripts/cleanup-native-smoke.ts 
b/cli/scripts/cleanup-native-smoke.ts new file mode 100644 index 00000000..9174df3f --- /dev/null +++ b/cli/scripts/cleanup-native-smoke.ts @@ -0,0 +1,141 @@ +import type {ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputPlugin} from '../src/plugins/plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import glob from 'fast-glob' + +process.env['TNMSC_FORCE_NATIVE_BINDING'] = '1' +delete process.env['VITEST'] +delete process.env['VITEST_WORKER_ID'] + +const cleanupModule = await import('../src/commands/CleanupUtils') +const fallbackModule = await import('../src/commands/CleanupUtils.fallback') +const pluginCore = await import('../src/plugins/plugin-core') + +function createMockLogger(): ILogger { + return { + trace: () => {}, + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {}, + fatal: () => {} + } as ILogger +} + +function createCleanContext(workspaceDir: string): OutputCleanContext { + return { + logger: createMockLogger(), + fs, + path, + glob, + collectedOutputContext: { + workspace: { + directory: { + pathKind: pluginCore.FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [{ + dirFromWorkspacePath: { + pathKind: pluginCore.FilePathKind.Relative, + path: 'project-a', + basePath: workspaceDir, + getDirectoryName: () => 'project-a', + getAbsolutePath: () => path.join(workspaceDir, 'project-a') + } + }] + }, + aindexDir: path.join(workspaceDir, 'aindex') + } + } as OutputCleanContext +} + +function createSmokePlugin(workspaceDir: string): OutputPlugin { + return { + type: pluginCore.PluginKind.Output, + name: 'SmokeOutputPlugin', + log: createMockLogger(), + declarativeOutput: true, + outputCapabilities: {}, + async declareOutputFiles() { + return [ + {path: path.join(workspaceDir, 'project-a', 'AGENTS.md'), source: {}}, + {path: path.join(workspaceDir, 'project-a', 
'commands', 'AGENTS.md'), source: {}} + ] + }, + async declareCleanupPaths(): Promise { + return { + delete: [{ + kind: 'glob', + path: path.join(workspaceDir, '.codex', 'skills', '*'), + excludeBasenames: ['.system'] + }] + } + }, + async convertContent() { + return 'smoke' + } + } +} + +async function main(): Promise { + if (!cleanupModule.hasNativeCleanupBinding()) { + throw new Error('Native cleanup binding is unavailable. Build the CLI NAPI module first.') + } + + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-native-cleanup-smoke-')) + const workspaceDir = path.join(tempDir, 'workspace') + const legacySkillDir = path.join(workspaceDir, '.codex', 'skills', 'legacy') + const preservedSkillDir = path.join(workspaceDir, '.codex', 'skills', '.system') + const rootOutput = path.join(workspaceDir, 'project-a', 'AGENTS.md') + const childOutput = path.join(workspaceDir, 'project-a', 'commands', 'AGENTS.md') + + fs.mkdirSync(path.dirname(rootOutput), {recursive: true}) + fs.mkdirSync(path.dirname(childOutput), {recursive: true}) + fs.mkdirSync(legacySkillDir, {recursive: true}) + fs.mkdirSync(preservedSkillDir, {recursive: true}) + fs.writeFileSync(rootOutput, '# root', 'utf8') + fs.writeFileSync(childOutput, '# child', 'utf8') + fs.writeFileSync(path.join(legacySkillDir, 'SKILL.md'), '# stale', 'utf8') + fs.writeFileSync(path.join(preservedSkillDir, 'SKILL.md'), '# keep', 'utf8') + + try { + const plugin = createSmokePlugin(workspaceDir) + const cleanCtx = createCleanContext(workspaceDir) + + const nativePlan = await cleanupModule.collectDeletionTargets([plugin], cleanCtx) + const fallbackPlan = await fallbackModule.collectDeletionTargets([plugin], cleanCtx) + + const sortPaths = (value: {filesToDelete: string[], dirsToDelete: string[], excludedScanGlobs: string[]}) => ({ + ...value, + filesToDelete: [...value.filesToDelete].sort(), + dirsToDelete: [...value.dirsToDelete].sort(), + excludedScanGlobs: [...value.excludedScanGlobs].sort() + }) + + if 
(JSON.stringify(sortPaths(nativePlan)) !== JSON.stringify(sortPaths(fallbackPlan))) { + throw new Error(`Native cleanup plan mismatch.\nNative: ${JSON.stringify(nativePlan, null, 2)}\nFallback: ${JSON.stringify(fallbackPlan, null, 2)}`) + } + + const result = await cleanupModule.performCleanup([plugin], cleanCtx, createMockLogger()) + if (result.deletedFiles !== 2 || result.deletedDirs !== 1 || result.errors.length > 0) { + throw new Error(`Unexpected native cleanup result: ${JSON.stringify(result, null, 2)}`) + } + + if (fs.existsSync(rootOutput) || fs.existsSync(childOutput) || fs.existsSync(legacySkillDir)) { + throw new Error('Native cleanup did not remove the expected outputs') + } + if (!fs.existsSync(preservedSkillDir)) { + throw new Error('Native cleanup removed the preserved .system skill directory') + } + + process.stdout.write('cleanup-native-smoke: ok\n') + } + finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } +} + +await main() diff --git a/cli/src/commands/CleanupUtils.adapter.test.ts b/cli/src/commands/CleanupUtils.adapter.test.ts new file mode 100644 index 00000000..cdb02db3 --- /dev/null +++ b/cli/src/commands/CleanupUtils.adapter.test.ts @@ -0,0 +1,146 @@ +import type {ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputPlugin} from '../plugins/plugin-core' +import * as fs from 'node:fs' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it, vi} from 'vitest' +import {FilePathKind, PluginKind} from '../plugins/plugin-core' + +const nativeBindingMocks = vi.hoisted(() => ({ + planCleanup: vi.fn<(snapshotJson: string) => string>(), + performCleanup: vi.fn<(snapshotJson: string) => string>() +})) + +vi.mock('../core/native-binding', () => ({ + getNativeBinding: () => ({ + planCleanup: nativeBindingMocks.planCleanup, + performCleanup: nativeBindingMocks.performCleanup + }) +})) + +const cleanupModulePromise = import('./CleanupUtils') + +function createMockLogger(): ILogger { + return { 
+ trace: () => {}, + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {}, + fatal: () => {} + } as ILogger +} + +function createCleanContext(workspaceDir: string): OutputCleanContext { + return { + logger: createMockLogger(), + fs, + path, + glob, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [{ + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'project-a', + basePath: workspaceDir, + getDirectoryName: () => 'project-a', + getAbsolutePath: () => path.join(workspaceDir, 'project-a') + } + }] + }, + aindexDir: path.join(workspaceDir, 'aindex') + } + } as OutputCleanContext +} + +function createMockOutputPlugin(): OutputPlugin { + return { + type: PluginKind.Output, + name: 'MockOutputPlugin', + log: createMockLogger(), + declarativeOutput: true, + outputCapabilities: {}, + async declareOutputFiles() { + return [{path: path.join('/tmp', 'project-a', 'AGENTS.md'), source: {}}] + }, + async declareCleanupPaths(): Promise { + return { + delete: [{kind: 'glob', path: path.join('/tmp', '.codex', 'skills', '*'), excludeBasenames: ['.system']}] + } + }, + async convertContent() { + return 'test' + } + } +} + +describe('cleanupUtils native adapter', () => { + it('uses the native cleanup bridge when it is available', async () => { + nativeBindingMocks.planCleanup.mockReset() + nativeBindingMocks.performCleanup.mockReset() + + nativeBindingMocks.planCleanup.mockReturnValue(JSON.stringify({ + filesToDelete: ['/tmp/project-a/AGENTS.md'], + dirsToDelete: ['/tmp/.codex/skills/legacy'], + violations: [], + conflicts: [], + excludedScanGlobs: ['**/.git/**'] + })) + nativeBindingMocks.performCleanup.mockReturnValue(JSON.stringify({ + deletedFiles: 1, + deletedDirs: 1, + errors: [], + violations: [], + conflicts: [], + filesToDelete: ['/tmp/project-a/AGENTS.md'], + 
dirsToDelete: ['/tmp/.codex/skills/legacy'], + excludedScanGlobs: ['**/.git/**'] + })) + + const {collectDeletionTargets, hasNativeCleanupBinding, performCleanup} = await cleanupModulePromise + const workspaceDir = path.resolve('tmp-native-cleanup-adapter') + const cleanCtx = createCleanContext(workspaceDir) + const plugin = createMockOutputPlugin() + + expect(hasNativeCleanupBinding()).toBe(true) + + const plan = await collectDeletionTargets([plugin], cleanCtx) + expect(plan).toEqual({ + filesToDelete: ['/tmp/project-a/AGENTS.md'], + dirsToDelete: ['/tmp/.codex/skills/legacy'], + violations: [], + conflicts: [], + excludedScanGlobs: ['**/.git/**'] + }) + expect(nativeBindingMocks.planCleanup).toHaveBeenCalledOnce() + + const planSnapshot = JSON.parse(String(nativeBindingMocks.planCleanup.mock.calls[0]?.[0])) as { + readonly pluginSnapshots: readonly {pluginName: string, outputs: readonly string[], cleanup: {delete?: readonly {kind: string}[]}}[] + } + expect(planSnapshot.pluginSnapshots).toEqual([ + expect.objectContaining({ + pluginName: 'MockOutputPlugin', + outputs: ['/tmp/project-a/AGENTS.md'], + cleanup: expect.objectContaining({ + delete: [expect.objectContaining({kind: 'glob'})] + }) + }) + ]) + + const result = await performCleanup([plugin], cleanCtx, createMockLogger()) + expect(result).toEqual({ + deletedFiles: 1, + deletedDirs: 1, + errors: [], + violations: [], + conflicts: [] + }) + expect(nativeBindingMocks.performCleanup).toHaveBeenCalledOnce() + }) +}) diff --git a/cli/src/commands/CleanupUtils.fallback.ts b/cli/src/commands/CleanupUtils.fallback.ts new file mode 100644 index 00000000..7119baed --- /dev/null +++ b/cli/src/commands/CleanupUtils.fallback.ts @@ -0,0 +1,508 @@ +import type {DeletionError} from '../core/desk-paths' +import type {ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputCleanupPathDeclaration, OutputFileDeclaration, OutputPlugin, PluginOptions} from '../plugins/plugin-core' +import type {ProtectedPathRule, 
ProtectionMode, ProtectionRuleMatcher} from '../ProtectedDeletionGuard' +import * as fs from 'node:fs' +import * as path from 'node:path' +import glob from 'fast-glob' +import { + buildDiagnostic, + buildFileOperationDiagnostic, + diagnosticLines +} from '@/diagnostics' +import {compactDeletionTargets} from '../cleanup/delete-targets' +import {deleteTargets as deskDeleteTargets} from '../core/desk-paths' +import { + collectAllPluginOutputs +} from '../plugins/plugin-core' +import { + buildComparisonKeys, + collectConfiguredAindexInputRules, + collectProjectRoots, + collectProtectedInputSourceRules, + createProtectedDeletionGuard, + logProtectedDeletionGuardError, + partitionDeletionTargets, + resolveAbsolutePath +} from '../ProtectedDeletionGuard' + +/** + * Result of cleanup operation + */ +export interface CleanupResult { + readonly deletedFiles: number + readonly deletedDirs: number + readonly errors: readonly CleanupError[] + readonly violations: readonly import('../ProtectedDeletionGuard').ProtectedPathViolation[] + readonly conflicts: readonly CleanupProtectionConflict[] + readonly message?: string +} + +/** + * Error during cleanup operation + */ +export interface CleanupError { + readonly path: string + readonly type: 'file' | 'directory' + readonly error: unknown +} + +export interface CleanupProtectionConflict { + readonly outputPath: string + readonly outputPlugin: string + readonly protectedPath: string + readonly protectionMode: ProtectionMode + readonly protectedBy: string + readonly reason: string +} + +export class CleanupProtectionConflictError extends Error { + readonly conflicts: readonly CleanupProtectionConflict[] + + constructor(conflicts: readonly CleanupProtectionConflict[]) { + super(buildCleanupProtectionConflictMessage(conflicts)) + this.name = 'CleanupProtectionConflictError' + this.conflicts = conflicts + } +} + +interface CleanupTargetCollections { + readonly filesToDelete: string[] + readonly dirsToDelete: string[] + readonly 
violations: readonly import('../ProtectedDeletionGuard').ProtectedPathViolation[] + readonly conflicts: readonly CleanupProtectionConflict[] + readonly excludedScanGlobs: string[] +} + +const DEFAULT_CLEANUP_SCAN_EXCLUDE_GLOBS = [ + '**/node_modules/**', + '**/.git/**', + '**/.turbo/**', + '**/.pnpm-store/**', + '**/.yarn/**', + '**/.next/**' +] as const + +function normalizeGlobPattern(pattern: string): string { + return resolveAbsolutePath(pattern).replaceAll('\\', '/') +} + +function expandCleanupGlob( + pattern: string, + ignoreGlobs: readonly string[] +): readonly string[] { + const normalizedPattern = normalizeGlobPattern(pattern) + return glob.sync(normalizedPattern, { + onlyFiles: false, + dot: true, + absolute: true, + followSymbolicLinks: false, + ignore: [...ignoreGlobs] + }) +} + +function shouldExcludeCleanupMatch( + matchedPath: string, + target: OutputCleanupPathDeclaration +): boolean { + if (target.excludeBasenames == null || target.excludeBasenames.length === 0) return false + const basename = path.basename(matchedPath) + return target.excludeBasenames.includes(basename) +} + +async function collectPluginCleanupDeclarations( + plugin: OutputPlugin, + cleanCtx: OutputCleanContext +): Promise { + if (plugin.declareCleanupPaths == null) return {} + return plugin.declareCleanupPaths({...cleanCtx, dryRun: true}) +} + +async function collectPluginCleanupSnapshot( + plugin: OutputPlugin, + cleanCtx: OutputCleanContext, + predeclaredOutputs?: ReadonlyMap +): Promise<{ + readonly plugin: OutputPlugin + readonly outputs: Awaited> + readonly cleanup: OutputCleanupDeclarations +}> { + const existingOutputDeclarations = predeclaredOutputs?.get(plugin) + const [outputs, cleanup] = await Promise.all([ + existingOutputDeclarations != null + ? 
Promise.resolve(existingOutputDeclarations) + : plugin.declareOutputFiles({...cleanCtx, dryRun: true}), + collectPluginCleanupDeclarations(plugin, cleanCtx) + ]) + + return {plugin, outputs, cleanup} +} + +function buildCleanupProtectionConflictMessage(conflicts: readonly CleanupProtectionConflict[]): string { + const pathList = conflicts.map(conflict => conflict.outputPath).join(', ') + return `Cleanup protection conflict: ${conflicts.length} output path(s) are also protected: ${pathList}` +} + +function detectCleanupProtectionConflicts( + outputPathOwners: ReadonlyMap, + guard: ReturnType +): CleanupProtectionConflict[] { + const conflicts: CleanupProtectionConflict[] = [] + + for (const [outputPath, outputPlugins] of outputPathOwners.entries()) { + const outputKeys = new Set(buildComparisonKeys(outputPath)) + + for (const rule of guard.compiledRules) { + const isExactMatch = rule.comparisonKeys.some(ruleKey => outputKeys.has(ruleKey)) + if (!isExactMatch) continue + + for (const outputPlugin of outputPlugins) { + conflicts.push({ + outputPath, + outputPlugin, + protectedPath: rule.path, + protectionMode: rule.protectionMode, + protectedBy: rule.source, + reason: rule.reason + }) + } + } + } + + return conflicts.sort((a, b) => { + const pathDiff = a.outputPath.localeCompare(b.outputPath) + if (pathDiff !== 0) return pathDiff + return a.protectedPath.localeCompare(b.protectedPath) + }) +} + +function logCleanupProtectionConflicts( + logger: ILogger, + conflicts: readonly CleanupProtectionConflict[] +): void { + const firstConflict = conflicts[0] + + logger.error(buildDiagnostic({ + code: 'CLEANUP_PROTECTION_CONFLICT_DETECTED', + title: 'Cleanup output paths conflict with protected inputs', + rootCause: diagnosticLines( + `tnmsc found ${conflicts.length} output path(s) that also match protected cleanup rules.`, + firstConflict == null + ? 'No conflict details were captured.' 
+ : `Example conflict: "${firstConflict.outputPath}" is protected by "${firstConflict.protectedPath}".` + ), + exactFix: diagnosticLines( + 'Separate generated output paths from protected source or reserved workspace paths before running cleanup again.' + ), + possibleFixes: [ + diagnosticLines('Update cleanup protect declarations so they do not overlap generated outputs.'), + diagnosticLines('Move the conflicting output target to a generated-only directory.') + ], + details: { + count: conflicts.length, + conflicts: conflicts.map(conflict => ({ + outputPath: conflict.outputPath, + outputPlugin: conflict.outputPlugin, + protectedPath: conflict.protectedPath, + protectionMode: conflict.protectionMode, + protectedBy: conflict.protectedBy, + reason: conflict.reason + })) + } + })) +} + +/** + * Collect deletion targets from enabled output plugins. + */ +export async function collectDeletionTargets( + outputPlugins: readonly OutputPlugin[], + cleanCtx: OutputCleanContext, + predeclaredOutputs?: ReadonlyMap +): Promise<{ + filesToDelete: string[] + dirsToDelete: string[] + violations: import('../ProtectedDeletionGuard').ProtectedPathViolation[] + conflicts: CleanupProtectionConflict[] + excludedScanGlobs: string[] +}> { + const deleteFiles = new Set() + const deleteDirs = new Set() + const protectedRules = new Map() + const excludeScanGlobSet = new Set(DEFAULT_CLEANUP_SCAN_EXCLUDE_GLOBS) + const outputPathOwners = new Map() + + const pluginSnapshots = await Promise.all( + outputPlugins.map(async plugin => collectPluginCleanupSnapshot(plugin, cleanCtx, predeclaredOutputs)) + ) + + const addDeletePath = (rawPath: string, kind: 'file' | 'directory'): void => { + if (kind === 'directory') deleteDirs.add(resolveAbsolutePath(rawPath)) + else deleteFiles.add(resolveAbsolutePath(rawPath)) + } + + const addProtectRule = ( + rawPath: string, + protectionMode: ProtectionMode, + reason: string, + source: string, + matcher: ProtectionRuleMatcher = 'path' + ): void => { + const 
resolvedPath = resolveAbsolutePath(rawPath) + protectedRules.set(`${matcher}:${protectionMode}:${resolvedPath}`, { + path: resolvedPath, + protectionMode, + reason, + source, + matcher + }) + } + + const defaultProtectionModeForTarget = (target: OutputCleanupPathDeclaration): ProtectionMode => { + if (target.protectionMode != null) return target.protectionMode + return target.kind === 'file' ? 'direct' : 'recursive' + } + + for (const rule of collectProtectedInputSourceRules(cleanCtx.collectedOutputContext)) addProtectRule(rule.path, rule.protectionMode, rule.reason, rule.source) + if (cleanCtx.collectedOutputContext.aindexDir != null && cleanCtx.pluginOptions != null) { + for (const rule of collectConfiguredAindexInputRules(cleanCtx.pluginOptions as Required, cleanCtx.collectedOutputContext.aindexDir, { + workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path + })) { + addProtectRule(rule.path, rule.protectionMode, rule.reason, rule.source, rule.matcher) + } + } + + for (const rule of cleanCtx.pluginOptions?.cleanupProtection?.rules ?? []) { + addProtectRule( + rule.path, + rule.protectionMode, + rule.reason ?? 'configured cleanup protection rule', + 'configured-cleanup-protection', + rule.matcher ?? 'path' + ) + } + + for (const snapshot of pluginSnapshots) { + for (const declaration of snapshot.outputs) { + const resolvedOutputPath = resolveAbsolutePath(declaration.path) + addDeletePath(resolvedOutputPath, 'file') + const existingOwners = outputPathOwners.get(resolvedOutputPath) + if (existingOwners == null) outputPathOwners.set(resolvedOutputPath, [snapshot.plugin.name]) + else if (!existingOwners.includes(snapshot.plugin.name)) existingOwners.push(snapshot.plugin.name) + } + for (const ignoreGlob of snapshot.cleanup.excludeScanGlobs ?? 
[]) excludeScanGlobSet.add(normalizeGlobPattern(ignoreGlob)) + } + + const excludeScanGlobs = [...excludeScanGlobSet] + + const resolveDeleteGlob = (target: OutputCleanupPathDeclaration): void => { + for (const matchedPath of expandCleanupGlob(target.path, excludeScanGlobs)) { + if (shouldExcludeCleanupMatch(matchedPath, target)) continue + + try { + const stat = fs.lstatSync(matchedPath) + if (stat.isDirectory()) addDeletePath(matchedPath, 'directory') + else addDeletePath(matchedPath, 'file') + } + catch {} + } + } + + const resolveProtectGlob = (target: OutputCleanupPathDeclaration, pluginName: string): void => { + const protectionMode = defaultProtectionModeForTarget(target) + const reason = target.label != null + ? `plugin cleanup protect declaration (${target.label})` + : 'plugin cleanup protect declaration' + + for (const matchedPath of expandCleanupGlob(target.path, excludeScanGlobs)) { + addProtectRule(matchedPath, protectionMode, reason, `plugin-cleanup-protect:${pluginName}`) + } + } + + for (const {plugin, cleanup} of pluginSnapshots) { + for (const target of cleanup.protect ?? []) { + if (target.kind === 'glob') { + resolveProtectGlob(target, plugin.name) + continue + } + addProtectRule( + target.path, + defaultProtectionModeForTarget(target), + target.label != null ? `plugin cleanup protect declaration (${target.label})` : 'plugin cleanup protect declaration', + `plugin-cleanup-protect:${plugin.name}` + ) + } + + for (const target of cleanup.delete ?? []) { + if (target.kind === 'glob') { + resolveDeleteGlob(target) + continue + } + if (target.kind === 'directory') addDeletePath(target.path, 'directory') + else addDeletePath(target.path, 'file') + } + } + + const guard = createProtectedDeletionGuard({ + workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path, + projectRoots: collectProjectRoots(cleanCtx.collectedOutputContext), + rules: [...protectedRules.values()], + ...cleanCtx.collectedOutputContext.aindexDir != null + ? 
{aindexDir: cleanCtx.collectedOutputContext.aindexDir} + : {} + }) + const conflicts = detectCleanupProtectionConflicts(outputPathOwners, guard) + if (conflicts.length > 0) throw new CleanupProtectionConflictError(conflicts) + const filePartition = partitionDeletionTargets([...deleteFiles], guard) + const dirPartition = partitionDeletionTargets([...deleteDirs], guard) + + const compactedTargets = compactDeletionTargets( + filePartition.safePaths, + dirPartition.safePaths + ) + + return { + filesToDelete: compactedTargets.files, + dirsToDelete: compactedTargets.dirs, + violations: [...filePartition.violations, ...dirPartition.violations].sort((a, b) => a.targetPath.localeCompare(b.targetPath)), + conflicts: [], + excludedScanGlobs: [...excludeScanGlobSet].sort((a, b) => a.localeCompare(b)) + } +} + +function buildCleanupErrors( + logger: ILogger, + errors: readonly DeletionError[], + type: 'file' | 'directory' +): CleanupError[] { + return errors.map(currentError => { + const errorMessage = currentError.error instanceof Error ? currentError.error.message : String(currentError.error) + logger.warn(buildFileOperationDiagnostic({ + code: type === 'file' ? 'CLEANUP_FILE_DELETE_FAILED' : 'CLEANUP_DIRECTORY_DELETE_FAILED', + title: type === 'file' ? 
'Cleanup could not delete a file' : 'Cleanup could not delete a directory', + operation: 'delete', + targetKind: type, + path: currentError.path, + error: errorMessage, + details: { + phase: 'cleanup' + } + })) + + return {path: currentError.path, type, error: currentError.error} + }) +} + +async function executeCleanupTargets( + targets: CleanupTargetCollections, + logger: ILogger +): Promise<{deletedFiles: number, deletedDirs: number, errors: CleanupError[]}> { + logger.debug('cleanup delete execution started', { + filesToDelete: targets.filesToDelete.length, + dirsToDelete: targets.dirsToDelete.length + }) + + const result = await deskDeleteTargets({ + files: targets.filesToDelete, + dirs: targets.dirsToDelete + }) + + const fileErrors = buildCleanupErrors(logger, result.fileErrors, 'file') + const dirErrors = buildCleanupErrors(logger, result.dirErrors, 'directory') + const allErrors = [...fileErrors, ...dirErrors] + + logger.debug('cleanup delete execution complete', { + deletedFiles: result.deletedFiles.length, + deletedDirs: result.deletedDirs.length, + errors: allErrors.length + }) + + return { + deletedFiles: result.deletedFiles.length, + deletedDirs: result.deletedDirs.length, + errors: allErrors + } +} + +function logCleanupPlanDiagnostics( + logger: ILogger, + targets: CleanupTargetCollections +): void { + logger.debug('cleanup plan built', { + filesToDelete: targets.filesToDelete.length, + dirsToDelete: targets.dirsToDelete.length, + violations: targets.violations.length, + conflicts: targets.conflicts.length, + excludedScanGlobs: targets.excludedScanGlobs + }) +} + +/** + * Perform cleanup operation for output plugins. + * This is the main reusable cleanup function that can be called from both + * CleanCommand and ExecuteCommand (for pre-cleanup). 
+ */ +export async function performCleanup( + outputPlugins: readonly OutputPlugin[], + cleanCtx: OutputCleanContext, + logger: ILogger, + predeclaredOutputs?: ReadonlyMap +): Promise { + if (predeclaredOutputs != null) { + const outputs = await collectAllPluginOutputs(outputPlugins, cleanCtx, predeclaredOutputs) + logger.debug('Collected outputs for cleanup', { + projectDirs: outputs.projectDirs.length, + projectFiles: outputs.projectFiles.length, + globalDirs: outputs.globalDirs.length, + globalFiles: outputs.globalFiles.length + }) + } + + let targets: Awaited> + try { + targets = await collectDeletionTargets(outputPlugins, cleanCtx, predeclaredOutputs) + } + catch (error) { + if (error instanceof CleanupProtectionConflictError) { + logCleanupProtectionConflicts(logger, error.conflicts) + return { + deletedFiles: 0, + deletedDirs: 0, + errors: [], + violations: [], + conflicts: error.conflicts, + message: error.message + } + } + throw error + } + const cleanupTargets: CleanupTargetCollections = { + filesToDelete: targets.filesToDelete, + dirsToDelete: targets.dirsToDelete, + violations: targets.violations, + conflicts: targets.conflicts, + excludedScanGlobs: targets.excludedScanGlobs + } + logCleanupPlanDiagnostics(logger, cleanupTargets) + + if (cleanupTargets.violations.length > 0) { + logProtectedDeletionGuardError(logger, 'cleanup', cleanupTargets.violations) + return { + deletedFiles: 0, + deletedDirs: 0, + errors: [], + violations: cleanupTargets.violations, + conflicts: [], + message: `Protected deletion guard blocked cleanup for ${cleanupTargets.violations.length} path(s)` + } + } + + const executionResult = await executeCleanupTargets(cleanupTargets, logger) + + return { + deletedFiles: executionResult.deletedFiles, + deletedDirs: executionResult.deletedDirs, + errors: executionResult.errors, + violations: [], + conflicts: [] + } +} diff --git a/cli/src/commands/CleanupUtils.ts b/cli/src/commands/CleanupUtils.ts index 7119baed..78144a8e 100644 --- 
a/cli/src/commands/CleanupUtils.ts +++ b/cli/src/commands/CleanupUtils.ts @@ -1,112 +1,149 @@ -import type {DeletionError} from '../core/desk-paths' import type {ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputCleanupPathDeclaration, OutputFileDeclaration, OutputPlugin, PluginOptions} from '../plugins/plugin-core' -import type {ProtectedPathRule, ProtectionMode, ProtectionRuleMatcher} from '../ProtectedDeletionGuard' -import * as fs from 'node:fs' -import * as path from 'node:path' -import glob from 'fast-glob' +import type {ProtectionMode, ProtectionRuleMatcher} from '../ProtectedDeletionGuard' import { buildDiagnostic, buildFileOperationDiagnostic, diagnosticLines } from '@/diagnostics' -import {compactDeletionTargets} from '../cleanup/delete-targets' -import {deleteTargets as deskDeleteTargets} from '../core/desk-paths' +import {getNativeBinding} from '../core/native-binding' +import {collectAllPluginOutputs} from '../plugins/plugin-core' import { - collectAllPluginOutputs -} from '../plugins/plugin-core' -import { - buildComparisonKeys, collectConfiguredAindexInputRules, collectProjectRoots, collectProtectedInputSourceRules, - createProtectedDeletionGuard, - logProtectedDeletionGuardError, - partitionDeletionTargets, - resolveAbsolutePath + logProtectedDeletionGuardError } from '../ProtectedDeletionGuard' +import { + CleanupProtectionConflictError, + collectDeletionTargets as collectDeletionTargetsFallback, + performCleanup as performCleanupFallback +} from './CleanupUtils.fallback' + +export type { + CleanupError, + CleanupProtectionConflict, + CleanupResult +} from './CleanupUtils.fallback' +export { + CleanupProtectionConflictError +} from './CleanupUtils.fallback' + +interface NativeCleanupBinding { + readonly planCleanup?: (snapshotJson: string) => string | Promise + readonly performCleanup?: (snapshotJson: string) => string | Promise +} -/** - * Result of cleanup operation - */ -export interface CleanupResult { - readonly deletedFiles: 
number - readonly deletedDirs: number - readonly errors: readonly CleanupError[] - readonly violations: readonly import('../ProtectedDeletionGuard').ProtectedPathViolation[] - readonly conflicts: readonly CleanupProtectionConflict[] - readonly message?: string +type NativeProtectionMode = 'direct' | 'recursive' +type NativeProtectionRuleMatcher = 'path' | 'glob' +type NativeCleanupTargetKind = 'file' | 'directory' | 'glob' +type NativeCleanupErrorKind = 'file' | 'directory' + +interface NativeCleanupTarget { + readonly path: string + readonly kind: NativeCleanupTargetKind + readonly excludeBasenames?: readonly string[] + readonly protectionMode?: NativeProtectionMode + readonly scope?: string + readonly label?: string +} + +interface NativeCleanupDeclarations { + readonly delete?: readonly NativeCleanupTarget[] + readonly protect?: readonly NativeCleanupTarget[] + readonly excludeScanGlobs?: readonly string[] +} + +interface NativePluginCleanupSnapshot { + readonly pluginName: string + readonly outputs: readonly string[] + readonly cleanup: NativeCleanupDeclarations } -/** - * Error during cleanup operation - */ -export interface CleanupError { +interface NativeProtectedRule { readonly path: string - readonly type: 'file' | 'directory' - readonly error: unknown + readonly protectionMode: NativeProtectionMode + readonly reason: string + readonly source: string + readonly matcher?: NativeProtectionRuleMatcher | undefined +} + +interface NativeCleanupSnapshot { + readonly workspaceDir: string + readonly aindexDir?: string + readonly projectRoots: readonly string[] + readonly protectedRules: readonly NativeProtectedRule[] + readonly pluginSnapshots: readonly NativePluginCleanupSnapshot[] } -export interface CleanupProtectionConflict { +interface NativeProtectedPathViolation { + readonly targetPath: string + readonly protectedPath: string + readonly protectionMode: NativeProtectionMode + readonly reason: string + readonly source: string +} + +interface 
NativeCleanupProtectionConflict { readonly outputPath: string readonly outputPlugin: string readonly protectedPath: string - readonly protectionMode: ProtectionMode + readonly protectionMode: NativeProtectionMode readonly protectedBy: string readonly reason: string } -export class CleanupProtectionConflictError extends Error { - readonly conflicts: readonly CleanupProtectionConflict[] +interface NativeCleanupPlan { + readonly filesToDelete: string[] + readonly dirsToDelete: string[] + readonly violations: readonly NativeProtectedPathViolation[] + readonly conflicts: readonly NativeCleanupProtectionConflict[] + readonly excludedScanGlobs: string[] +} - constructor(conflicts: readonly CleanupProtectionConflict[]) { - super(buildCleanupProtectionConflictMessage(conflicts)) - this.name = 'CleanupProtectionConflictError' - this.conflicts = conflicts - } +interface NativeCleanupError { + readonly path: string + readonly kind: NativeCleanupErrorKind + readonly error: string } -interface CleanupTargetCollections { +interface NativeCleanupResult { + readonly deletedFiles: number + readonly deletedDirs: number + readonly errors: readonly NativeCleanupError[] + readonly violations: readonly NativeProtectedPathViolation[] + readonly conflicts: readonly NativeCleanupProtectionConflict[] readonly filesToDelete: string[] readonly dirsToDelete: string[] - readonly violations: readonly import('../ProtectedDeletionGuard').ProtectedPathViolation[] - readonly conflicts: readonly CleanupProtectionConflict[] readonly excludedScanGlobs: string[] } -const DEFAULT_CLEANUP_SCAN_EXCLUDE_GLOBS = [ - '**/node_modules/**', - '**/.git/**', - '**/.turbo/**', - '**/.pnpm-store/**', - '**/.yarn/**', - '**/.next/**' -] as const - -function normalizeGlobPattern(pattern: string): string { - return resolveAbsolutePath(pattern).replaceAll('\\', '/') +const nativeBinding = getNativeBinding() + +export function hasNativeCleanupBinding(): boolean { + return nativeBinding?.planCleanup != null && 
nativeBinding.performCleanup != null } -function expandCleanupGlob( - pattern: string, - ignoreGlobs: readonly string[] -): readonly string[] { - const normalizedPattern = normalizeGlobPattern(pattern) - return glob.sync(normalizedPattern, { - onlyFiles: false, - dot: true, - absolute: true, - followSymbolicLinks: false, - ignore: [...ignoreGlobs] - }) +function mapProtectionMode(mode: ProtectionMode): NativeProtectionMode { + return mode } -function shouldExcludeCleanupMatch( - matchedPath: string, - target: OutputCleanupPathDeclaration -): boolean { - if (target.excludeBasenames == null || target.excludeBasenames.length === 0) return false - const basename = path.basename(matchedPath) - return target.excludeBasenames.includes(basename) +function mapProtectionRuleMatcher( + matcher: ProtectionRuleMatcher | undefined +): NativeProtectionRuleMatcher | undefined { + return matcher +} + +function mapCleanupTarget(target: OutputCleanupPathDeclaration): NativeCleanupTarget { + return { + path: target.path, + kind: target.kind, + ...target.excludeBasenames != null && target.excludeBasenames.length > 0 + ? {excludeBasenames: [...target.excludeBasenames]} + : {}, + ...target.protectionMode != null ? {protectionMode: mapProtectionMode(target.protectionMode)} : {}, + ...target.scope != null ? {scope: target.scope} : {}, + ...target.label != null ? 
{label: target.label} : {} + } } async function collectPluginCleanupDeclarations( @@ -121,11 +158,7 @@ async function collectPluginCleanupSnapshot( plugin: OutputPlugin, cleanCtx: OutputCleanContext, predeclaredOutputs?: ReadonlyMap -): Promise<{ - readonly plugin: OutputPlugin - readonly outputs: Awaited> - readonly cleanup: OutputCleanupDeclarations -}> { +): Promise { const existingOutputDeclarations = predeclaredOutputs?.get(plugin) const [outputs, cleanup] = await Promise.all([ existingOutputDeclarations != null @@ -134,50 +167,45 @@ async function collectPluginCleanupSnapshot( collectPluginCleanupDeclarations(plugin, cleanCtx) ]) - return {plugin, outputs, cleanup} + return { + pluginName: plugin.name, + outputs: outputs.map(output => output.path), + cleanup: { + ...cleanup.delete != null && cleanup.delete.length > 0 + ? {delete: cleanup.delete.map(mapCleanupTarget)} + : {}, + ...cleanup.protect != null && cleanup.protect.length > 0 + ? {protect: cleanup.protect.map(mapCleanupTarget)} + : {}, + ...cleanup.excludeScanGlobs != null && cleanup.excludeScanGlobs.length > 0 + ? {excludeScanGlobs: [...cleanup.excludeScanGlobs]} + : {} + } + } } -function buildCleanupProtectionConflictMessage(conflicts: readonly CleanupProtectionConflict[]): string { - const pathList = conflicts.map(conflict => conflict.outputPath).join(', ') - return `Cleanup protection conflict: ${conflicts.length} output path(s) are also protected: ${pathList}` +function collectConfiguredCleanupProtectionRules( + cleanCtx: OutputCleanContext +): NativeProtectedRule[] { + return (cleanCtx.pluginOptions?.cleanupProtection?.rules ?? []).map(rule => ({ + path: rule.path, + protectionMode: mapProtectionMode(rule.protectionMode), + reason: rule.reason ?? 'configured cleanup protection rule', + source: 'configured-cleanup-protection', + matcher: mapProtectionRuleMatcher(rule.matcher ?? 
'path') + })) } -function detectCleanupProtectionConflicts( - outputPathOwners: ReadonlyMap, - guard: ReturnType -): CleanupProtectionConflict[] { - const conflicts: CleanupProtectionConflict[] = [] - - for (const [outputPath, outputPlugins] of outputPathOwners.entries()) { - const outputKeys = new Set(buildComparisonKeys(outputPath)) - - for (const rule of guard.compiledRules) { - const isExactMatch = rule.comparisonKeys.some(ruleKey => outputKeys.has(ruleKey)) - if (!isExactMatch) continue - - for (const outputPlugin of outputPlugins) { - conflicts.push({ - outputPath, - outputPlugin, - protectedPath: rule.path, - protectionMode: rule.protectionMode, - protectedBy: rule.source, - reason: rule.reason - }) - } - } - } - - return conflicts.sort((a, b) => { - const pathDiff = a.outputPath.localeCompare(b.outputPath) - if (pathDiff !== 0) return pathDiff - return a.protectedPath.localeCompare(b.protectedPath) - }) +function buildCleanupProtectionConflictMessage( + conflicts: readonly NativeCleanupProtectionConflict[] +): string { + const pathList = conflicts.map(conflict => conflict.outputPath).join(', ') + return `Cleanup protection conflict: ${conflicts.length} output path(s) are also protected: ${pathList}` } function logCleanupProtectionConflicts( logger: ILogger, - conflicts: readonly CleanupProtectionConflict[] + conflicts: readonly NativeCleanupProtectionConflict[] ): void { const firstConflict = conflicts[0] @@ -199,254 +227,156 @@ function logCleanupProtectionConflicts( ], details: { count: conflicts.length, - conflicts: conflicts.map(conflict => ({ - outputPath: conflict.outputPath, - outputPlugin: conflict.outputPlugin, - protectedPath: conflict.protectedPath, - protectionMode: conflict.protectionMode, - protectedBy: conflict.protectedBy, - reason: conflict.reason - })) + conflicts } })) } -/** - * Collect deletion targets from enabled output plugins. 
- */ -export async function collectDeletionTargets( +function logCleanupPlanDiagnostics( + logger: ILogger, + plan: Pick +): void { + logger.debug('cleanup plan built', { + filesToDelete: plan.filesToDelete.length, + dirsToDelete: plan.dirsToDelete.length, + violations: plan.violations.length, + conflicts: plan.conflicts.length, + excludedScanGlobs: plan.excludedScanGlobs + }) +} + +function logNativeCleanupErrors( + logger: ILogger, + errors: readonly NativeCleanupError[] +): readonly {path: string, type: 'file' | 'directory', error: string}[] { + return errors.map(currentError => { + const type = currentError.kind === 'directory' ? 'directory' : 'file' + logger.warn(buildFileOperationDiagnostic({ + code: type === 'file' ? 'CLEANUP_FILE_DELETE_FAILED' : 'CLEANUP_DIRECTORY_DELETE_FAILED', + title: type === 'file' ? 'Cleanup could not delete a file' : 'Cleanup could not delete a directory', + operation: 'delete', + targetKind: type, + path: currentError.path, + error: currentError.error, + details: { + phase: 'cleanup' + } + })) + + return {path: currentError.path, type, error: currentError.error} + }) +} + +async function buildCleanupSnapshot( outputPlugins: readonly OutputPlugin[], cleanCtx: OutputCleanContext, predeclaredOutputs?: ReadonlyMap -): Promise<{ - filesToDelete: string[] - dirsToDelete: string[] - violations: import('../ProtectedDeletionGuard').ProtectedPathViolation[] - conflicts: CleanupProtectionConflict[] - excludedScanGlobs: string[] -}> { - const deleteFiles = new Set() - const deleteDirs = new Set() - const protectedRules = new Map() - const excludeScanGlobSet = new Set(DEFAULT_CLEANUP_SCAN_EXCLUDE_GLOBS) - const outputPathOwners = new Map() - +): Promise { const pluginSnapshots = await Promise.all( outputPlugins.map(async plugin => collectPluginCleanupSnapshot(plugin, cleanCtx, predeclaredOutputs)) ) - const addDeletePath = (rawPath: string, kind: 'file' | 'directory'): void => { - if (kind === 'directory') 
deleteDirs.add(resolveAbsolutePath(rawPath)) - else deleteFiles.add(resolveAbsolutePath(rawPath)) - } - - const addProtectRule = ( - rawPath: string, - protectionMode: ProtectionMode, - reason: string, - source: string, - matcher: ProtectionRuleMatcher = 'path' - ): void => { - const resolvedPath = resolveAbsolutePath(rawPath) - protectedRules.set(`${matcher}:${protectionMode}:${resolvedPath}`, { - path: resolvedPath, - protectionMode, - reason, - source, - matcher + const protectedRules: NativeProtectedRule[] = [] + for (const rule of collectProtectedInputSourceRules(cleanCtx.collectedOutputContext)) { + protectedRules.push({ + path: rule.path, + protectionMode: mapProtectionMode(rule.protectionMode), + reason: rule.reason, + source: rule.source, + ...rule.matcher != null ? {matcher: mapProtectionRuleMatcher(rule.matcher)} : {} }) } - const defaultProtectionModeForTarget = (target: OutputCleanupPathDeclaration): ProtectionMode => { - if (target.protectionMode != null) return target.protectionMode - return target.kind === 'file' ? 
'direct' : 'recursive' - } - - for (const rule of collectProtectedInputSourceRules(cleanCtx.collectedOutputContext)) addProtectRule(rule.path, rule.protectionMode, rule.reason, rule.source) if (cleanCtx.collectedOutputContext.aindexDir != null && cleanCtx.pluginOptions != null) { - for (const rule of collectConfiguredAindexInputRules(cleanCtx.pluginOptions as Required, cleanCtx.collectedOutputContext.aindexDir, { - workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path - })) { - addProtectRule(rule.path, rule.protectionMode, rule.reason, rule.source, rule.matcher) + for (const rule of collectConfiguredAindexInputRules( + cleanCtx.pluginOptions as Required, + cleanCtx.collectedOutputContext.aindexDir, + {workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path} + )) { + protectedRules.push({ + path: rule.path, + protectionMode: mapProtectionMode(rule.protectionMode), + reason: rule.reason, + source: rule.source, + ...rule.matcher != null ? {matcher: mapProtectionRuleMatcher(rule.matcher)} : {} + }) } } - for (const rule of cleanCtx.pluginOptions?.cleanupProtection?.rules ?? []) { - addProtectRule( - rule.path, - rule.protectionMode, - rule.reason ?? 'configured cleanup protection rule', - 'configured-cleanup-protection', - rule.matcher ?? 'path' - ) - } + protectedRules.push(...collectConfiguredCleanupProtectionRules(cleanCtx)) - for (const snapshot of pluginSnapshots) { - for (const declaration of snapshot.outputs) { - const resolvedOutputPath = resolveAbsolutePath(declaration.path) - addDeletePath(resolvedOutputPath, 'file') - const existingOwners = outputPathOwners.get(resolvedOutputPath) - if (existingOwners == null) outputPathOwners.set(resolvedOutputPath, [snapshot.plugin.name]) - else if (!existingOwners.includes(snapshot.plugin.name)) existingOwners.push(snapshot.plugin.name) - } - for (const ignoreGlob of snapshot.cleanup.excludeScanGlobs ?? 
[]) excludeScanGlobSet.add(normalizeGlobPattern(ignoreGlob)) - } - - const excludeScanGlobs = [...excludeScanGlobSet] - - const resolveDeleteGlob = (target: OutputCleanupPathDeclaration): void => { - for (const matchedPath of expandCleanupGlob(target.path, excludeScanGlobs)) { - if (shouldExcludeCleanupMatch(matchedPath, target)) continue - - try { - const stat = fs.lstatSync(matchedPath) - if (stat.isDirectory()) addDeletePath(matchedPath, 'directory') - else addDeletePath(matchedPath, 'file') - } - catch {} - } - } - - const resolveProtectGlob = (target: OutputCleanupPathDeclaration, pluginName: string): void => { - const protectionMode = defaultProtectionModeForTarget(target) - const reason = target.label != null - ? `plugin cleanup protect declaration (${target.label})` - : 'plugin cleanup protect declaration' - - for (const matchedPath of expandCleanupGlob(target.path, excludeScanGlobs)) { - addProtectRule(matchedPath, protectionMode, reason, `plugin-cleanup-protect:${pluginName}`) - } - } - - for (const {plugin, cleanup} of pluginSnapshots) { - for (const target of cleanup.protect ?? []) { - if (target.kind === 'glob') { - resolveProtectGlob(target, plugin.name) - continue - } - addProtectRule( - target.path, - defaultProtectionModeForTarget(target), - target.label != null ? `plugin cleanup protect declaration (${target.label})` : 'plugin cleanup protect declaration', - `plugin-cleanup-protect:${plugin.name}` - ) - } - - for (const target of cleanup.delete ?? []) { - if (target.kind === 'glob') { - resolveDeleteGlob(target) - continue - } - if (target.kind === 'directory') addDeletePath(target.path, 'directory') - else addDeletePath(target.path, 'file') - } - } - - const guard = createProtectedDeletionGuard({ + return { workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path, - projectRoots: collectProjectRoots(cleanCtx.collectedOutputContext), - rules: [...protectedRules.values()], ...cleanCtx.collectedOutputContext.aindexDir != null ? 
{aindexDir: cleanCtx.collectedOutputContext.aindexDir} - : {} - }) - const conflicts = detectCleanupProtectionConflicts(outputPathOwners, guard) - if (conflicts.length > 0) throw new CleanupProtectionConflictError(conflicts) - const filePartition = partitionDeletionTargets([...deleteFiles], guard) - const dirPartition = partitionDeletionTargets([...deleteDirs], guard) - - const compactedTargets = compactDeletionTargets( - filePartition.safePaths, - dirPartition.safePaths - ) - - return { - filesToDelete: compactedTargets.files, - dirsToDelete: compactedTargets.dirs, - violations: [...filePartition.violations, ...dirPartition.violations].sort((a, b) => a.targetPath.localeCompare(b.targetPath)), - conflicts: [], - excludedScanGlobs: [...excludeScanGlobSet].sort((a, b) => a.localeCompare(b)) + : {}, + projectRoots: collectProjectRoots(cleanCtx.collectedOutputContext), + protectedRules, + pluginSnapshots } } -function buildCleanupErrors( - logger: ILogger, - errors: readonly DeletionError[], - type: 'file' | 'directory' -): CleanupError[] { - return errors.map(currentError => { - const errorMessage = currentError.error instanceof Error ? currentError.error.message : String(currentError.error) - logger.warn(buildFileOperationDiagnostic({ - code: type === 'file' ? 'CLEANUP_FILE_DELETE_FAILED' : 'CLEANUP_DIRECTORY_DELETE_FAILED', - title: type === 'file' ? 
'Cleanup could not delete a file' : 'Cleanup could not delete a directory', - operation: 'delete', - targetKind: type, - path: currentError.path, - error: errorMessage, - details: { - phase: 'cleanup' - } - })) +function parseNativeJson<T>(json: string): T { + return JSON.parse(json) as T +} - return {path: currentError.path, type, error: currentError.error} - }) +export async function planCleanupWithNative( + snapshot: NativeCleanupSnapshot +): Promise<NativeCleanupPlan> { + if (nativeBinding?.planCleanup == null) throw new Error('Native cleanup planning is unavailable') + const result = await Promise.resolve(nativeBinding.planCleanup(JSON.stringify(snapshot))) + return parseNativeJson<NativeCleanupPlan>(result) +} -async function executeCleanupTargets( - targets: CleanupTargetCollections, - logger: ILogger -): Promise<{deletedFiles: number, deletedDirs: number, errors: CleanupError[]}> { - logger.debug('cleanup delete execution started', { - filesToDelete: targets.filesToDelete.length, - dirsToDelete: targets.dirsToDelete.length - }) +export async function performCleanupWithNative( + snapshot: NativeCleanupSnapshot +): Promise<NativeCleanupResult> { + if (nativeBinding?.performCleanup == null) throw new Error('Native cleanup execution is unavailable') + const result = await Promise.resolve(nativeBinding.performCleanup(JSON.stringify(snapshot))) + return parseNativeJson<NativeCleanupResult>(result) +} - const result = await deskDeleteTargets({ - files: targets.filesToDelete, - dirs: targets.dirsToDelete - }) +export async function collectDeletionTargets( + outputPlugins: readonly OutputPlugin[], + cleanCtx: OutputCleanContext, + predeclaredOutputs?: ReadonlyMap<OutputPlugin, readonly OutputFileDeclaration[]> +): Promise<{ + filesToDelete: string[] + dirsToDelete: string[] + violations: import('../ProtectedDeletionGuard').ProtectedPathViolation[] + conflicts: import('./CleanupUtils.fallback').CleanupProtectionConflict[] + excludedScanGlobs: string[] +}> { + if (!hasNativeCleanupBinding()) { + return collectDeletionTargetsFallback(outputPlugins, cleanCtx, predeclaredOutputs) + } - const 
fileErrors = buildCleanupErrors(logger, result.fileErrors, 'file') - const dirErrors = buildCleanupErrors(logger, result.dirErrors, 'directory') - const allErrors = [...fileErrors, ...dirErrors] + const snapshot = await buildCleanupSnapshot(outputPlugins, cleanCtx, predeclaredOutputs) + const plan = await planCleanupWithNative(snapshot) - logger.debug('cleanup delete execution complete', { - deletedFiles: result.deletedFiles.length, - deletedDirs: result.deletedDirs.length, - errors: allErrors.length - }) + if (plan.conflicts.length > 0) { + throw new CleanupProtectionConflictError(plan.conflicts) + } return { - deletedFiles: result.deletedFiles.length, - deletedDirs: result.deletedDirs.length, - errors: allErrors + filesToDelete: plan.filesToDelete, + dirsToDelete: plan.dirsToDelete, + violations: [...plan.violations], + conflicts: [], + excludedScanGlobs: plan.excludedScanGlobs } } -function logCleanupPlanDiagnostics( - logger: ILogger, - targets: CleanupTargetCollections -): void { - logger.debug('cleanup plan built', { - filesToDelete: targets.filesToDelete.length, - dirsToDelete: targets.dirsToDelete.length, - violations: targets.violations.length, - conflicts: targets.conflicts.length, - excludedScanGlobs: targets.excludedScanGlobs - }) -} - -/** - * Perform cleanup operation for output plugins. - * This is the main reusable cleanup function that can be called from both - * CleanCommand and ExecuteCommand (for pre-cleanup). 
- */ export async function performCleanup( outputPlugins: readonly OutputPlugin[], cleanCtx: OutputCleanContext, logger: ILogger, predeclaredOutputs?: ReadonlyMap -): Promise { +): Promise { + if (!hasNativeCleanupBinding()) { + return performCleanupFallback(outputPlugins, cleanCtx, logger, predeclaredOutputs) + } + if (predeclaredOutputs != null) { const outputs = await collectAllPluginOutputs(outputPlugins, cleanCtx, predeclaredOutputs) logger.debug('Collected outputs for cleanup', { @@ -457,51 +387,50 @@ export async function performCleanup( }) } - let targets: Awaited> - try { - targets = await collectDeletionTargets(outputPlugins, cleanCtx, predeclaredOutputs) - } - catch (error) { - if (error instanceof CleanupProtectionConflictError) { - logCleanupProtectionConflicts(logger, error.conflicts) - return { - deletedFiles: 0, - deletedDirs: 0, - errors: [], - violations: [], - conflicts: error.conflicts, - message: error.message - } + const snapshot = await buildCleanupSnapshot(outputPlugins, cleanCtx, predeclaredOutputs) + const result = await performCleanupWithNative(snapshot) + + logCleanupPlanDiagnostics(logger, result) + + if (result.conflicts.length > 0) { + logCleanupProtectionConflicts(logger, result.conflicts) + return { + deletedFiles: 0, + deletedDirs: 0, + errors: [], + violations: [], + conflicts: result.conflicts, + message: buildCleanupProtectionConflictMessage(result.conflicts) } - throw error } - const cleanupTargets: CleanupTargetCollections = { - filesToDelete: targets.filesToDelete, - dirsToDelete: targets.dirsToDelete, - violations: targets.violations, - conflicts: targets.conflicts, - excludedScanGlobs: targets.excludedScanGlobs - } - logCleanupPlanDiagnostics(logger, cleanupTargets) - if (cleanupTargets.violations.length > 0) { - logProtectedDeletionGuardError(logger, 'cleanup', cleanupTargets.violations) + if (result.violations.length > 0) { + logProtectedDeletionGuardError(logger, 'cleanup', result.violations) return { deletedFiles: 0, 
deletedDirs: 0, errors: [], - violations: cleanupTargets.violations, + violations: result.violations, conflicts: [], - message: `Protected deletion guard blocked cleanup for ${cleanupTargets.violations.length} path(s)` + message: `Protected deletion guard blocked cleanup for ${result.violations.length} path(s)` } } - const executionResult = await executeCleanupTargets(cleanupTargets, logger) + logger.debug('cleanup delete execution started', { + filesToDelete: result.filesToDelete.length, + dirsToDelete: result.dirsToDelete.length + }) + const loggedErrors = logNativeCleanupErrors(logger, result.errors) + logger.debug('cleanup delete execution complete', { + deletedFiles: result.deletedFiles, + deletedDirs: result.deletedDirs, + errors: loggedErrors.length + }) return { - deletedFiles: executionResult.deletedFiles, - deletedDirs: executionResult.deletedDirs, - errors: executionResult.errors, + deletedFiles: result.deletedFiles, + deletedDirs: result.deletedDirs, + errors: loggedErrors, violations: [], conflicts: [] } diff --git a/cli/src/core/cleanup.rs b/cli/src/core/cleanup.rs new file mode 100644 index 00000000..a9f9c3f2 --- /dev/null +++ b/cli/src/core/cleanup.rs @@ -0,0 +1,1321 @@ +use std::collections::{BTreeSet, HashMap, HashSet}; +use std::env; +use std::fs; +use std::path::{Component, Path, PathBuf}; + +use globset::{Glob, GlobBuilder, GlobSet, GlobSetBuilder}; +use serde::{Deserialize, Serialize}; +use walkdir::WalkDir; + +use crate::core::{config, desk_paths}; + +const DEFAULT_CLEANUP_SCAN_EXCLUDE_GLOBS: [&str; 6] = [ + "**/node_modules/**", + "**/.git/**", + "**/.turbo/**", + "**/.pnpm-store/**", + "**/.yarn/**", + "**/.next/**", +]; + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum ProtectionModeDto { + Direct, + Recursive, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum ProtectionRuleMatcherDto { + 
Path, + Glob, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum CleanupTargetKindDto { + File, + Directory, + Glob, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum CleanupErrorKindDto { + File, + Directory, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupTargetDto { + pub path: String, + pub kind: CleanupTargetKindDto, + #[serde(default)] + pub exclude_basenames: Vec<String>, + pub protection_mode: Option<ProtectionModeDto>, + pub scope: Option<String>, + pub label: Option<String>, +} + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupDeclarationsDto { + #[serde(default)] + pub delete: Vec<CleanupTargetDto>, + #[serde(default)] + pub protect: Vec<CleanupTargetDto>, + #[serde(default)] + pub exclude_scan_globs: Vec<String>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PluginCleanupSnapshotDto { + pub plugin_name: String, + #[serde(default)] + pub outputs: Vec<String>, + #[serde(default)] + pub cleanup: CleanupDeclarationsDto, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ProtectedRuleDto { + pub path: String, + pub protection_mode: ProtectionModeDto, + pub reason: String, + pub source: String, + pub matcher: Option<ProtectionRuleMatcherDto>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupSnapshot { + pub workspace_dir: String, + pub aindex_dir: Option<String>, + #[serde(default)] + pub project_roots: Vec<String>, + #[serde(default)] + pub protected_rules: Vec<ProtectedRuleDto>, + #[serde(default)] + pub plugin_snapshots: Vec<PluginCleanupSnapshotDto>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ProtectedPathViolationDto { + pub target_path: String, + pub protected_path: String, + pub protection_mode: ProtectionModeDto, + pub 
reason: String, + pub source: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupProtectionConflictDto { + pub output_path: String, + pub output_plugin: String, + pub protected_path: String, + pub protection_mode: ProtectionModeDto, + pub protected_by: String, + pub reason: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupPlan { + pub files_to_delete: Vec<String>, + pub dirs_to_delete: Vec<String>, + pub violations: Vec<ProtectedPathViolationDto>, + pub conflicts: Vec<CleanupProtectionConflictDto>, + pub excluded_scan_globs: Vec<String>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupErrorDto { + pub path: String, + pub kind: CleanupErrorKindDto, + pub error: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct CleanupExecutionResultDto { + pub deleted_files: usize, + pub deleted_dirs: usize, + pub errors: Vec<CleanupErrorDto>, + pub violations: Vec<ProtectedPathViolationDto>, + pub conflicts: Vec<CleanupProtectionConflictDto>, + pub files_to_delete: Vec<String>, + pub dirs_to_delete: Vec<String>, + pub excluded_scan_globs: Vec<String>, +} + +#[derive(Debug, Clone)] +struct CompiledProtectedRule { + path: String, + protection_mode: ProtectionModeDto, + reason: String, + source: String, + comparison_keys: Vec<String>, + normalized_path: String, + specificity: usize, +} + +#[derive(Debug, Clone)] +struct ProtectedDeletionGuard { + compiled_rules: Vec<CompiledProtectedRule>, +} + +struct PartitionResult { + safe_paths: Vec<String>, + violations: Vec<ProtectedPathViolationDto>, +} + +fn resolve_home_dir() -> PathBuf { + let runtime_environment = config::resolve_runtime_environment(); + runtime_environment + .effective_home_dir + .or(runtime_environment.native_home_dir) + .unwrap_or_else(|| PathBuf::from("/")) +} + +fn expand_home_path(raw_path: &str) -> PathBuf { + if raw_path == "~" || raw_path.starts_with("~/") || raw_path.starts_with("~\\") { + return config::resolve_tilde(raw_path); + } + 
PathBuf::from(raw_path) +} + +fn normalize_path(path: &Path) -> PathBuf { + let mut normalized = PathBuf::new(); + + for component in path.components() { + match component { + Component::Prefix(prefix) => normalized.push(prefix.as_os_str()), + Component::RootDir => normalized.push(Path::new(std::path::MAIN_SEPARATOR_STR)), + Component::CurDir => {} + Component::ParentDir => { + let popped = normalized.pop(); + if !popped && !path.is_absolute() { + normalized.push(".."); + } + } + Component::Normal(segment) => normalized.push(segment), + } + } + + if normalized.as_os_str().is_empty() { + if path.is_absolute() { + return PathBuf::from(std::path::MAIN_SEPARATOR_STR); + } + return PathBuf::from("."); + } + + normalized +} + +fn resolve_absolute_path(raw_path: &str) -> PathBuf { + let expanded = expand_home_path(raw_path); + let candidate = if expanded.is_absolute() { + expanded + } else { + env::current_dir() + .unwrap_or_else(|_| PathBuf::from(".")) + .join(expanded) + }; + + normalize_path(&candidate) +} + +fn path_to_string(path: &Path) -> String { + path.to_string_lossy().into_owned() +} + +fn path_to_glob_string(path: &Path) -> String { + path_to_string(path).replace('\\', "/") +} + +fn normalize_glob_pattern(pattern: &str) -> String { + path_to_glob_string(&resolve_absolute_path(pattern)) +} + +fn normalize_for_comparison(raw_path: &str) -> String { + let normalized = path_to_string(&resolve_absolute_path(raw_path)); + if cfg!(windows) { + normalized.to_lowercase() + } else { + normalized + } +} + +fn build_comparison_keys(raw_path: &str) -> Vec { + let absolute = resolve_absolute_path(raw_path); + let mut keys = HashSet::from([normalize_for_comparison(&path_to_string(&absolute))]); + + if let Ok(real_path) = fs::canonicalize(&absolute) { + keys.insert(normalize_for_comparison(&path_to_string(&real_path))); + } + + let mut collected = keys.into_iter().collect::>(); + collected.sort(); + collected +} + +fn is_same_or_child_path(candidate: &str, parent: &str) -> 
bool { + if candidate == parent { + return true; + } + + let separator = std::path::MAIN_SEPARATOR; + let prefix = if parent.ends_with(separator) { + parent.to_string() + } else { + format!("{parent}{separator}") + }; + + candidate.starts_with(&prefix) +} + +fn create_protected_rule( + raw_path: &str, + protection_mode: ProtectionModeDto, + reason: impl Into, + source: impl Into, + matcher: Option, +) -> ProtectedRuleDto { + ProtectedRuleDto { + path: path_to_string(&resolve_absolute_path(raw_path)), + protection_mode, + reason: reason.into(), + source: source.into(), + matcher, + } +} + +fn compile_rule(rule: &ProtectedRuleDto) -> CompiledProtectedRule { + let normalized_path = normalize_for_comparison(&rule.path); + CompiledProtectedRule { + path: path_to_string(&resolve_absolute_path(&rule.path)), + protection_mode: rule.protection_mode, + reason: rule.reason.clone(), + source: rule.source.clone(), + comparison_keys: build_comparison_keys(&rule.path), + specificity: normalized_path.trim_end_matches(std::path::MAIN_SEPARATOR).len(), + normalized_path, + } +} + +fn dedupe_and_compile_rules(rules: &[ProtectedRuleDto]) -> Vec { + let mut compiled_by_key = HashMap::new(); + + for rule in rules { + let compiled = compile_rule(rule); + compiled_by_key.insert( + format!( + "{}:{}", + match compiled.protection_mode { + ProtectionModeDto::Direct => "direct", + ProtectionModeDto::Recursive => "recursive", + }, + compiled.normalized_path + ), + compiled, + ); + } + + let mut compiled = compiled_by_key.into_values().collect::>(); + compiled.sort_by(|a, b| { + b.specificity + .cmp(&a.specificity) + .then_with(|| match (a.protection_mode, b.protection_mode) { + (ProtectionModeDto::Recursive, ProtectionModeDto::Direct) => std::cmp::Ordering::Less, + (ProtectionModeDto::Direct, ProtectionModeDto::Recursive) => std::cmp::Ordering::Greater, + _ => std::cmp::Ordering::Equal, + }) + .then_with(|| a.path.cmp(&b.path)) + }); + compiled +} + +fn glob_builder(pattern: &str) -> Result { 
+ GlobBuilder::new(pattern) + .literal_separator(true) + .backslash_escape(false) + .case_insensitive(cfg!(windows)) + .build() + .map_err(|error| error.to_string()) +} + +fn build_globset(patterns: &[String]) -> Result, String> { + if patterns.is_empty() { + return Ok(None); + } + + let mut builder = GlobSetBuilder::new(); + for pattern in patterns { + builder.add(glob_builder(pattern)?); + } + builder.build().map(Some).map_err(|error| error.to_string()) +} + +fn has_glob_magic(value: &str) -> bool { + value.contains('*') + || value.contains('?') + || value.contains('[') + || value.contains(']') + || value.contains('{') + || value.contains('}') + || value.contains('!') +} + +fn detect_glob_scan_root(pattern: &str) -> PathBuf { + let normalized = pattern.replace('\\', "/"); + if !has_glob_magic(&normalized) { + return resolve_absolute_path(&normalized); + } + + let first_magic_index = normalized + .char_indices() + .find_map(|(index, character)| has_glob_magic(&character.to_string()).then_some(index)) + .unwrap_or(normalized.len()); + + let prefix = normalized[..first_magic_index].trim_end_matches('/'); + if prefix.is_empty() { + return env::current_dir().unwrap_or_else(|_| PathBuf::from(".")); + } + + let scan_root = prefix.rsplit_once('/').map_or(prefix, |(head, _)| { + if head.is_empty() { + if normalized.starts_with('/') { + "/" + } else { + prefix + } + } else { + head + } + }); + + resolve_absolute_path(scan_root) +} + +fn expand_glob(pattern: &str, ignore_globs: &[String]) -> Result, String> { + let normalized_pattern = normalize_glob_pattern(pattern); + let matcher = build_globset(std::slice::from_ref(&normalized_pattern))? 
+ .ok_or_else(|| "failed to compile cleanup glob".to_string())?; + let ignore_matcher = build_globset(ignore_globs)?; + + if !has_glob_magic(&normalized_pattern) { + let absolute_path = resolve_absolute_path(&normalized_pattern); + if !absolute_path.exists() { + return Ok(vec![]); + } + let candidate = path_to_glob_string(&absolute_path); + if ignore_matcher + .as_ref() + .is_some_and(|compiled| compiled.is_match(&candidate)) + { + return Ok(vec![]); + } + if matcher.is_match(&candidate) { + return Ok(vec![path_to_string(&absolute_path)]); + } + return Ok(vec![]); + } + + let scan_root = detect_glob_scan_root(&normalized_pattern); + if !scan_root.exists() { + return Ok(vec![]); + } + + let mut matches = Vec::new(); + let walker = WalkDir::new(&scan_root) + .follow_links(false) + .into_iter() + .filter_entry(|entry| { + let candidate = path_to_glob_string(entry.path()); + !ignore_matcher + .as_ref() + .is_some_and(|compiled| compiled.is_match(&candidate)) + }); + + for entry in walker { + let Ok(entry) = entry else { + continue; + }; + let candidate = path_to_glob_string(entry.path()); + if matcher.is_match(&candidate) { + matches.push(path_to_string(&normalize_path(entry.path()))); + } + } + + matches.sort(); + matches.dedup(); + Ok(matches) +} + +fn expand_protected_rules(rules: &[ProtectedRuleDto]) -> Result, String> { + let mut expanded = Vec::new(); + + for rule in rules { + if !matches!(rule.matcher, Some(ProtectionRuleMatcherDto::Glob)) { + expanded.push(create_protected_rule( + &rule.path, + rule.protection_mode, + rule.reason.clone(), + rule.source.clone(), + None, + )); + continue; + } + + for matched_path in expand_glob(&rule.path, &[])? 
{ + expanded.push(create_protected_rule( + &matched_path, + rule.protection_mode, + rule.reason.clone(), + rule.source.clone(), + None, + )); + } + } + + Ok(expanded) +} + +fn root_path_for(path: &Path) -> PathBuf { + let mut root = PathBuf::new(); + for component in path.components() { + match component { + Component::Prefix(prefix) => root.push(prefix.as_os_str()), + Component::RootDir => { + root.push(Path::new(std::path::MAIN_SEPARATOR_STR)); + break; + } + _ => break, + } + } + if root.as_os_str().is_empty() { + return PathBuf::from(std::path::MAIN_SEPARATOR_STR); + } + root +} + +fn collect_built_in_dangerous_path_rules() -> Vec { + let home_dir = resolve_home_dir(); + let xdg_config_home = env::var("XDG_CONFIG_HOME") + .ok() + .filter(|value| !value.trim().is_empty()) + .map(|value| resolve_absolute_path(&value)) + .unwrap_or_else(|| home_dir.join(".config")); + let xdg_data_home = env::var("XDG_DATA_HOME") + .ok() + .filter(|value| !value.trim().is_empty()) + .map(|value| resolve_absolute_path(&value)) + .unwrap_or_else(|| home_dir.join(".local/share")); + let xdg_state_home = env::var("XDG_STATE_HOME") + .ok() + .filter(|value| !value.trim().is_empty()) + .map(|value| resolve_absolute_path(&value)) + .unwrap_or_else(|| home_dir.join(".local/state")); + let xdg_cache_home = env::var("XDG_CACHE_HOME") + .ok() + .filter(|value| !value.trim().is_empty()) + .map(|value| resolve_absolute_path(&value)) + .unwrap_or_else(|| home_dir.join(".cache")); + + vec![ + create_protected_rule( + &path_to_string(&root_path_for(&home_dir)), + ProtectionModeDto::Direct, + "built-in dangerous root path", + "built-in-dangerous-root", + None, + ), + create_protected_rule( + &path_to_string(&home_dir), + ProtectionModeDto::Direct, + "built-in dangerous home directory", + "built-in-dangerous-root", + None, + ), + create_protected_rule( + &path_to_string(&xdg_config_home), + ProtectionModeDto::Direct, + "built-in dangerous config directory", + "built-in-dangerous-root", + None, + ), 
+ create_protected_rule( + &path_to_string(&xdg_data_home), + ProtectionModeDto::Direct, + "built-in dangerous data directory", + "built-in-dangerous-root", + None, + ), + create_protected_rule( + &path_to_string(&xdg_state_home), + ProtectionModeDto::Direct, + "built-in dangerous state directory", + "built-in-dangerous-root", + None, + ), + create_protected_rule( + &path_to_string(&xdg_cache_home), + ProtectionModeDto::Direct, + "built-in dangerous cache directory", + "built-in-dangerous-root", + None, + ), + create_protected_rule( + &path_to_string(&home_dir.join(".aindex")), + ProtectionModeDto::Direct, + "built-in global aindex directory", + "built-in-dangerous-root", + None, + ), + create_protected_rule( + &path_to_string(&home_dir.join(".aindex/.tnmsc.json")), + ProtectionModeDto::Direct, + "built-in global config file", + "built-in-config", + None, + ), + ] +} + +fn collect_workspace_reserved_rules( + workspace_dir: &str, + project_roots: &[String], + include_reserved_workspace_content_roots: bool, +) -> Vec { + let workspace_dir = path_to_string(&resolve_absolute_path(workspace_dir)); + let mut rules = vec![ + create_protected_rule( + &workspace_dir, + ProtectionModeDto::Direct, + "workspace root", + "workspace-reserved", + None, + ), + create_protected_rule( + &path_to_string(&resolve_absolute_path(&format!("{workspace_dir}/aindex"))), + ProtectionModeDto::Direct, + "reserved workspace aindex root", + "workspace-reserved", + None, + ), + create_protected_rule( + &path_to_string(&resolve_absolute_path(&format!("{workspace_dir}/knowladge"))), + ProtectionModeDto::Direct, + "reserved workspace knowladge root", + "workspace-reserved", + None, + ), + ]; + + for project_root in project_roots { + rules.push(create_protected_rule( + project_root, + ProtectionModeDto::Direct, + "workspace project root", + "workspace-project-root", + None, + )); + } + + if include_reserved_workspace_content_roots { + rules.push(create_protected_rule( + 
&format!("{workspace_dir}/aindex/dist/**/*.mdx"), + ProtectionModeDto::Direct, + "reserved workspace aindex dist mdx files", + "workspace-reserved", + Some(ProtectionRuleMatcherDto::Glob), + )); + rules.push(create_protected_rule( + &format!("{workspace_dir}/aindex/app/**/*.mdx"), + ProtectionModeDto::Direct, + "reserved workspace aindex app mdx files", + "workspace-reserved", + Some(ProtectionRuleMatcherDto::Glob), + )); + } + + rules +} + +fn create_guard(snapshot: &CleanupSnapshot, rules: &[ProtectedRuleDto]) -> Result { + let mut all_rules = collect_built_in_dangerous_path_rules(); + all_rules.extend(collect_workspace_reserved_rules( + &snapshot.workspace_dir, + &snapshot.project_roots, + true, + )); + + if let Some(aindex_dir) = snapshot.aindex_dir.as_ref() { + all_rules.push(create_protected_rule( + aindex_dir, + ProtectionModeDto::Direct, + "resolved aindex root", + "aindex-root", + None, + )); + } + + all_rules.extend_from_slice(rules); + let compiled_rules = dedupe_and_compile_rules(&expand_protected_rules(&all_rules)?); + + Ok(ProtectedDeletionGuard { compiled_rules }) +} + +fn is_rule_match(target_key: &str, rule_key: &str, protection_mode: ProtectionModeDto) -> bool { + match protection_mode { + ProtectionModeDto::Direct => is_same_or_child_path(rule_key, target_key), + ProtectionModeDto::Recursive => { + is_same_or_child_path(target_key, rule_key) || is_same_or_child_path(rule_key, target_key) + } + } +} + +fn select_more_specific_rule( + candidate: &CompiledProtectedRule, + current: Option<&CompiledProtectedRule>, +) -> CompiledProtectedRule { + let Some(current) = current else { + return candidate.clone(); + }; + + if candidate.specificity != current.specificity { + return if candidate.specificity > current.specificity { + candidate.clone() + } else { + current.clone() + }; + } + + if candidate.protection_mode != current.protection_mode { + return if candidate.protection_mode == ProtectionModeDto::Recursive { + candidate.clone() + } else { + 
current.clone() + }; + } + + if candidate.path < current.path { + candidate.clone() + } else { + current.clone() + } +} + +fn get_protected_path_violation( + target_path: &str, + guard: &ProtectedDeletionGuard, +) -> Option { + let absolute_target_path = path_to_string(&resolve_absolute_path(target_path)); + let target_keys = build_comparison_keys(&absolute_target_path); + let mut matched_rule: Option = None; + + for rule in &guard.compiled_rules { + let mut did_match = false; + for target_key in &target_keys { + for rule_key in &rule.comparison_keys { + if !is_rule_match(target_key, rule_key, rule.protection_mode) { + continue; + } + + matched_rule = Some(select_more_specific_rule(rule, matched_rule.as_ref())); + did_match = true; + break; + } + if did_match { + break; + } + } + } + + matched_rule.map(|rule| ProtectedPathViolationDto { + target_path: absolute_target_path, + protected_path: rule.path, + protection_mode: rule.protection_mode, + reason: rule.reason, + source: rule.source, + }) +} + +fn partition_deletion_targets(paths: &[String], guard: &ProtectedDeletionGuard) -> PartitionResult { + let mut safe_paths = Vec::new(); + let mut violations = Vec::new(); + + for target_path in paths { + if let Some(violation) = get_protected_path_violation(target_path, guard) { + violations.push(violation); + } else { + safe_paths.push(path_to_string(&resolve_absolute_path(target_path))); + } + } + + safe_paths.sort(); + violations.sort_by(|a, b| a.target_path.cmp(&b.target_path)); + + PartitionResult { safe_paths, violations } +} + +fn compact_deletion_targets(files: &[String], dirs: &[String]) -> (Vec, Vec) { + let files_by_key = files + .iter() + .map(|file_path| { + let resolved = path_to_string(&resolve_absolute_path(file_path)); + (resolved.clone(), resolved) + }) + .collect::>(); + let dirs_by_key = dirs + .iter() + .map(|dir_path| { + let resolved = path_to_string(&resolve_absolute_path(dir_path)); + (resolved.clone(), resolved) + }) + .collect::>(); + + let mut 
sorted_dir_entries = dirs_by_key.into_iter().collect::>(); + sorted_dir_entries.sort_by(|(left_key, _), (right_key, _)| left_key.len().cmp(&right_key.len())); + + let mut compacted_dirs: HashMap = HashMap::new(); + for (dir_key, dir_path) in sorted_dir_entries { + let covered_by_parent = compacted_dirs + .keys() + .any(|existing_parent_key| is_same_or_child_path(&dir_key, existing_parent_key)); + if !covered_by_parent { + compacted_dirs.insert(dir_key, dir_path); + } + } + + let mut compacted_files = Vec::new(); + for (file_key, file_path) in files_by_key { + let covered_by_dir = compacted_dirs + .keys() + .any(|dir_key| is_same_or_child_path(&file_key, dir_key)); + if !covered_by_dir { + compacted_files.push(file_path); + } + } + + compacted_files.sort(); + let mut compacted_dir_paths = compacted_dirs.into_values().collect::>(); + compacted_dir_paths.sort(); + + (compacted_files, compacted_dir_paths) +} + +fn detect_cleanup_protection_conflicts( + output_path_owners: &HashMap>, + guard: &ProtectedDeletionGuard, +) -> Vec { + let mut conflicts = Vec::new(); + + for (output_path, output_plugins) in output_path_owners { + let output_keys = build_comparison_keys(output_path) + .into_iter() + .collect::>(); + + for rule in &guard.compiled_rules { + let is_exact_match = rule + .comparison_keys + .iter() + .any(|rule_key| output_keys.contains(rule_key)); + if !is_exact_match { + continue; + } + + for output_plugin in output_plugins { + conflicts.push(CleanupProtectionConflictDto { + output_path: output_path.clone(), + output_plugin: output_plugin.clone(), + protected_path: rule.path.clone(), + protection_mode: rule.protection_mode, + protected_by: rule.source.clone(), + reason: rule.reason.clone(), + }); + } + } + } + + conflicts.sort_by(|a, b| { + a.output_path + .cmp(&b.output_path) + .then_with(|| a.protected_path.cmp(&b.protected_path)) + }); + conflicts +} + +fn should_exclude_cleanup_match(matched_path: &str, target: &CleanupTargetDto) -> bool { + if 
target.exclude_basenames.is_empty() { + return false; + } + + let basename = Path::new(matched_path) + .file_name() + .map(|value| value.to_string_lossy().into_owned()); + basename + .as_ref() + .is_some_and(|value| target.exclude_basenames.contains(value)) +} + +fn default_protection_mode_for_target(target: &CleanupTargetDto) -> ProtectionModeDto { + target.protection_mode.unwrap_or(match target.kind { + CleanupTargetKindDto::File => ProtectionModeDto::Direct, + CleanupTargetKindDto::Directory | CleanupTargetKindDto::Glob => ProtectionModeDto::Recursive, + }) +} + +pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { + let mut delete_files = HashSet::new(); + let mut delete_dirs = HashSet::new(); + let mut protected_rules = snapshot.protected_rules.clone(); + let mut exclude_scan_globs = + BTreeSet::from_iter(DEFAULT_CLEANUP_SCAN_EXCLUDE_GLOBS.iter().map(|value| (*value).to_string())); + let mut output_path_owners = HashMap::>::new(); + + for plugin_snapshot in &snapshot.plugin_snapshots { + for output in &plugin_snapshot.outputs { + let resolved_output_path = path_to_string(&resolve_absolute_path(output)); + delete_files.insert(resolved_output_path.clone()); + output_path_owners + .entry(resolved_output_path) + .or_default() + .push(plugin_snapshot.plugin_name.clone()); + } + + for ignore_glob in &plugin_snapshot.cleanup.exclude_scan_globs { + exclude_scan_globs.insert(normalize_glob_pattern(ignore_glob)); + } + } + + let ignore_globs = exclude_scan_globs.iter().cloned().collect::>(); + + for plugin_snapshot in &snapshot.plugin_snapshots { + for target in &plugin_snapshot.cleanup.protect { + if target.kind == CleanupTargetKindDto::Glob { + let protection_mode = default_protection_mode_for_target(target); + let reason = target + .label + .as_ref() + .map(|label| format!("plugin cleanup protect declaration ({label})")) + .unwrap_or_else(|| "plugin cleanup protect declaration".to_string()); + + for matched_path in expand_glob(&target.path, &ignore_globs)? 
{ + protected_rules.push(create_protected_rule( + &matched_path, + protection_mode, + reason.clone(), + format!("plugin-cleanup-protect:{}", plugin_snapshot.plugin_name), + None, + )); + } + continue; + } + + let reason = target + .label + .as_ref() + .map(|label| format!("plugin cleanup protect declaration ({label})")) + .unwrap_or_else(|| "plugin cleanup protect declaration".to_string()); + protected_rules.push(create_protected_rule( + &target.path, + default_protection_mode_for_target(target), + reason, + format!("plugin-cleanup-protect:{}", plugin_snapshot.plugin_name), + None, + )); + } + + for target in &plugin_snapshot.cleanup.delete { + if target.kind == CleanupTargetKindDto::Glob { + for matched_path in expand_glob(&target.path, &ignore_globs)? { + if should_exclude_cleanup_match(&matched_path, target) { + continue; + } + + let Ok(metadata) = fs::symlink_metadata(&matched_path) else { + continue; + }; + if metadata.is_dir() { + delete_dirs.insert(path_to_string(&resolve_absolute_path(&matched_path))); + } else { + delete_files.insert(path_to_string(&resolve_absolute_path(&matched_path))); + } + } + continue; + } + + match target.kind { + CleanupTargetKindDto::Directory => { + delete_dirs.insert(path_to_string(&resolve_absolute_path(&target.path))); + } + CleanupTargetKindDto::File => { + delete_files.insert(path_to_string(&resolve_absolute_path(&target.path))); + } + CleanupTargetKindDto::Glob => {} + } + } + } + + let guard = create_guard(&snapshot, &protected_rules)?; + let conflicts = detect_cleanup_protection_conflicts(&output_path_owners, &guard); + if !conflicts.is_empty() { + return Ok(CleanupPlan { + files_to_delete: Vec::new(), + dirs_to_delete: Vec::new(), + violations: Vec::new(), + conflicts, + excluded_scan_globs: ignore_globs, + }); + } + + let file_partition = + partition_deletion_targets(&delete_files.into_iter().collect::>(), &guard); + let dir_partition = + partition_deletion_targets(&delete_dirs.into_iter().collect::>(), &guard); + let 
(files_to_delete, dirs_to_delete) = + compact_deletion_targets(&file_partition.safe_paths, &dir_partition.safe_paths); + + let mut violations = file_partition.violations; + violations.extend(dir_partition.violations); + violations.sort_by(|a, b| a.target_path.cmp(&b.target_path)); + + Ok(CleanupPlan { + files_to_delete, + dirs_to_delete, + violations, + conflicts: Vec::new(), + excluded_scan_globs: ignore_globs, + }) +} + +pub fn perform_cleanup(snapshot: CleanupSnapshot) -> Result { + let plan = plan_cleanup(snapshot)?; + if !plan.conflicts.is_empty() || !plan.violations.is_empty() { + return Ok(CleanupExecutionResultDto { + deleted_files: 0, + deleted_dirs: 0, + errors: Vec::new(), + violations: plan.violations, + conflicts: plan.conflicts, + files_to_delete: plan.files_to_delete, + dirs_to_delete: plan.dirs_to_delete, + excluded_scan_globs: plan.excluded_scan_globs, + }); + } + + let delete_result = desk_paths::delete_targets(&plan.files_to_delete, &plan.dirs_to_delete); + let mut errors = delete_result + .file_errors + .into_iter() + .map(|error| CleanupErrorDto { + path: error.path, + kind: CleanupErrorKindDto::File, + error: error.error, + }) + .collect::>(); + errors.extend(delete_result.dir_errors.into_iter().map(|error| CleanupErrorDto { + path: error.path, + kind: CleanupErrorKindDto::Directory, + error: error.error, + })); + + Ok(CleanupExecutionResultDto { + deleted_files: delete_result.deleted_files.len(), + deleted_dirs: delete_result.deleted_dirs.len(), + errors, + violations: Vec::new(), + conflicts: Vec::new(), + files_to_delete: plan.files_to_delete, + dirs_to_delete: plan.dirs_to_delete, + excluded_scan_globs: plan.excluded_scan_globs, + }) +} + +#[cfg(feature = "napi")] +mod napi_binding { + use napi_derive::napi; + + use super::{CleanupExecutionResultDto, CleanupPlan, CleanupSnapshot}; + + fn parse_snapshot(snapshot_json: String) -> napi::Result { + serde_json::from_str(&snapshot_json).map_err(|error| 
napi::Error::from_reason(error.to_string())) + } + + fn serialize_result(result: &T) -> napi::Result { + serde_json::to_string(result).map_err(|error| napi::Error::from_reason(error.to_string())) + } + + #[napi] + pub fn plan_cleanup(snapshot_json: String) -> napi::Result { + let snapshot = parse_snapshot(snapshot_json)?; + let result: CleanupPlan = + super::plan_cleanup(snapshot).map_err(napi::Error::from_reason)?; + serialize_result(&result) + } + + #[napi] + pub fn perform_cleanup(snapshot_json: String) -> napi::Result { + let snapshot = parse_snapshot(snapshot_json)?; + let result: CleanupExecutionResultDto = + super::perform_cleanup(snapshot).map_err(napi::Error::from_reason)?; + serialize_result(&result) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use tempfile::tempdir; + + fn empty_snapshot(workspace_dir: &Path) -> CleanupSnapshot { + CleanupSnapshot { + workspace_dir: path_to_string(workspace_dir), + aindex_dir: Some(path_to_string(&workspace_dir.join("aindex"))), + project_roots: vec![path_to_string(&workspace_dir.join("project-a"))], + protected_rules: Vec::new(), + plugin_snapshots: Vec::new(), + } + } + + fn single_plugin_snapshot( + workspace_dir: &Path, + outputs: Vec, + cleanup: CleanupDeclarationsDto, + ) -> CleanupSnapshot { + CleanupSnapshot { + plugin_snapshots: vec![PluginCleanupSnapshotDto { + plugin_name: "MockOutputPlugin".to_string(), + outputs, + cleanup, + }], + ..empty_snapshot(workspace_dir) + } + } + + #[test] + fn detects_exact_output_protection_conflicts() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let output_path = workspace_dir.join("project-a/AGENTS.md"); + fs::create_dir_all(output_path.parent().unwrap()).unwrap(); + fs::write(&output_path, "# output").unwrap(); + + let snapshot = single_plugin_snapshot( + &workspace_dir, + vec![path_to_string(&output_path)], + CleanupDeclarationsDto { + protect: vec![CleanupTargetDto { + path: path_to_string(&output_path), + kind: 
CleanupTargetKindDto::File, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: None, + }], + ..CleanupDeclarationsDto::default() + }, + ); + + let plan = plan_cleanup(snapshot).unwrap(); + assert_eq!(plan.conflicts.len(), 1); + assert!(plan.files_to_delete.is_empty()); + assert!(plan.dirs_to_delete.is_empty()); + } + + #[test] + fn expands_delete_globs_and_respects_excluded_basenames() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let skills_dir = workspace_dir.join(".codex/skills"); + let system_dir = skills_dir.join(".system"); + let stale_dir = skills_dir.join("legacy"); + fs::create_dir_all(&system_dir).unwrap(); + fs::create_dir_all(&stale_dir).unwrap(); + + let snapshot = single_plugin_snapshot( + &workspace_dir, + vec![], + CleanupDeclarationsDto { + delete: vec![CleanupTargetDto { + path: path_to_string(&skills_dir.join("*")), + kind: CleanupTargetKindDto::Glob, + exclude_basenames: vec![".system".to_string()], + protection_mode: None, + scope: None, + label: None, + }], + ..CleanupDeclarationsDto::default() + }, + ); + + let plan = plan_cleanup(snapshot).unwrap(); + assert!(plan.dirs_to_delete.contains(&path_to_string(&stale_dir))); + assert!(!plan.dirs_to_delete.contains(&path_to_string(&system_dir))); + } + + #[test] + fn preserves_direct_vs_recursive_guard_behavior() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let direct_dir = workspace_dir.join("project-a"); + let recursive_dir = workspace_dir.join("aindex/dist"); + let direct_file = direct_dir.join("AGENTS.md"); + let recursive_file = recursive_dir.join("commands/demo.mdx"); + + let snapshot = single_plugin_snapshot( + &workspace_dir, + vec![path_to_string(&direct_file), path_to_string(&recursive_file)], + CleanupDeclarationsDto { + protect: vec![ + CleanupTargetDto { + path: path_to_string(&direct_dir), + kind: CleanupTargetKindDto::Directory, + exclude_basenames: 
Vec::new(), + protection_mode: Some(ProtectionModeDto::Direct), + scope: None, + label: None, + }, + CleanupTargetDto { + path: path_to_string(&recursive_dir), + kind: CleanupTargetKindDto::Directory, + exclude_basenames: Vec::new(), + protection_mode: Some(ProtectionModeDto::Recursive), + scope: None, + label: None, + }, + ], + ..CleanupDeclarationsDto::default() + }, + ); + + let plan = plan_cleanup(snapshot).unwrap(); + assert!(plan.files_to_delete.contains(&path_to_string(&direct_file))); + assert!(plan + .violations + .iter() + .any(|violation| violation.target_path == path_to_string(&recursive_file))); + } + + #[test] + fn blocks_reserved_workspace_mdx_descendants() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let protected_file = workspace_dir.join("aindex/dist/commands/demo.mdx"); + fs::create_dir_all(protected_file.parent().unwrap()).unwrap(); + fs::write(&protected_file, "# demo").unwrap(); + + let snapshot = single_plugin_snapshot( + &workspace_dir, + vec![], + CleanupDeclarationsDto { + delete: vec![CleanupTargetDto { + path: path_to_string(&workspace_dir.join("aindex/dist")), + kind: CleanupTargetKindDto::Directory, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: None, + }], + ..CleanupDeclarationsDto::default() + }, + ); + + let plan = plan_cleanup(snapshot).unwrap(); + assert!(plan.dirs_to_delete.is_empty()); + assert_eq!(plan.violations.len(), 1); + assert_eq!(plan.violations[0].protected_path, path_to_string(&protected_file)); + } + + #[cfg(unix)] + #[test] + fn matches_symlink_realpaths_against_protected_paths() { + use std::os::unix::fs::symlink; + + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let symlink_path = temp_dir.path().join("workspace-link"); + fs::create_dir_all(&workspace_dir).unwrap(); + symlink(&workspace_dir, &symlink_path).unwrap(); + + let snapshot = single_plugin_snapshot( + &workspace_dir, + 
vec![], + CleanupDeclarationsDto { + delete: vec![CleanupTargetDto { + path: path_to_string(&symlink_path), + kind: CleanupTargetKindDto::Directory, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: None, + }], + ..CleanupDeclarationsDto::default() + }, + ); + + let plan = plan_cleanup(snapshot).unwrap(); + assert!(plan.dirs_to_delete.is_empty()); + assert!(plan + .violations + .iter() + .any(|violation| violation.target_path == path_to_string(&symlink_path))); + } + + #[test] + fn compacts_nested_directory_targets() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let base_dir = workspace_dir.join(".claude"); + let rules_dir = base_dir.join("rules"); + let rule_file = rules_dir.join("demo.md"); + fs::create_dir_all(&rules_dir).unwrap(); + fs::write(&rule_file, "# demo").unwrap(); + + let snapshot = single_plugin_snapshot( + &workspace_dir, + vec![path_to_string(&rule_file)], + CleanupDeclarationsDto { + delete: vec![ + CleanupTargetDto { + path: path_to_string(&base_dir), + kind: CleanupTargetKindDto::Directory, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: None, + }, + CleanupTargetDto { + path: path_to_string(&rules_dir), + kind: CleanupTargetKindDto::Directory, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: None, + }, + CleanupTargetDto { + path: path_to_string(&rule_file), + kind: CleanupTargetKindDto::File, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: None, + }, + ], + ..CleanupDeclarationsDto::default() + }, + ); + + let plan = plan_cleanup(snapshot).unwrap(); + assert_eq!(plan.dirs_to_delete, vec![path_to_string(&base_dir)]); + assert!(plan.files_to_delete.is_empty()); + } +} diff --git a/cli/src/core/desk-paths.ts b/cli/src/core/desk-paths.ts index e289760b..d44dd3aa 100644 --- a/cli/src/core/desk-paths.ts +++ b/cli/src/core/desk-paths.ts @@ -5,10 +5,9 @@ import type { 
SafeWriteOptions, SafeWriteResult } from './desk-paths-fallback' -import {createRequire} from 'node:module' -import process from 'node:process' import {buildFileOperationDiagnostic} from '@/diagnostics' import * as fallback from './desk-paths-fallback' +import {getNativeBinding} from './native-binding' export type { DeleteTargetsResult, @@ -42,59 +41,7 @@ type NativeDeleteTargetsResult = DeleteTargetsResult & { readonly dir_errors?: readonly import('./desk-paths-fallback').DeletionError[] } -function shouldSkipNativeBinding(): boolean { - return process.env['NODE_ENV'] === 'test' - || process.env['VITEST'] != null - || process.env['VITEST_WORKER_ID'] != null -} - -function tryLoadNativeBinding(): NativeDeskPathsBinding | undefined { - if (shouldSkipNativeBinding()) return void 0 - - const suffixMap: Readonly> = { - 'win32-x64': 'win32-x64-msvc', - 'linux-x64': 'linux-x64-gnu', - 'linux-arm64': 'linux-arm64-gnu', - 'darwin-arm64': 'darwin-arm64', - 'darwin-x64': 'darwin-x64' - } - const suffix = suffixMap[`${process.platform}-${process.arch}`] - if (suffix == null) return void 0 - - try { - const _require = createRequire(import.meta.url) - const packageName = `@truenine/memory-sync-cli-${suffix}` - const binaryFile = `napi-memory-sync-cli.${suffix}.node` - const candidates = [ - packageName, - `${packageName}/${binaryFile}`, - `./${binaryFile}` - ] - - for (const specifier of candidates) { - try { - const loaded = _require(specifier) as unknown - const possibleBindings = [ - loaded, - (loaded as {default?: unknown})?.default, - (loaded as {config?: unknown})?.config, - (loaded as {default?: {config?: unknown}})?.default?.config - ] - - for (const candidate of possibleBindings) { - if (candidate != null && typeof candidate === 'object') return candidate as NativeDeskPathsBinding - } - } - catch {} - } - } - catch { - } - - return void 0 -} - -const nativeBinding = tryLoadNativeBinding() +const nativeBinding = getNativeBinding() function 
normalizeDeletionResult(result: NativeDeletionResult): DeletionResult { return { diff --git a/cli/src/core/mod.rs b/cli/src/core/mod.rs index 7e6db79e..5881df2c 100644 --- a/cli/src/core/mod.rs +++ b/cli/src/core/mod.rs @@ -1,3 +1,4 @@ +pub mod cleanup; pub mod config; pub mod desk_paths; pub mod input_plugins; diff --git a/cli/src/core/native-binding.ts b/cli/src/core/native-binding.ts new file mode 100644 index 00000000..4ea16586 --- /dev/null +++ b/cli/src/core/native-binding.ts @@ -0,0 +1,65 @@ +import {createRequire} from 'node:module' +import process from 'node:process' + +function shouldSkipNativeBinding(): boolean { + if (process.env['TNMSC_FORCE_NATIVE_BINDING'] === '1') return false + if (process.env['TNMSC_DISABLE_NATIVE_BINDING'] === '1') return true + + return process.env['NODE_ENV'] === 'test' + || process.env['VITEST'] != null + || process.env['VITEST_WORKER_ID'] != null +} + +export function tryLoadNativeBinding(): T | undefined { + if (shouldSkipNativeBinding()) return void 0 + + const suffixMap: Readonly> = { + 'win32-x64': 'win32-x64-msvc', + 'linux-x64': 'linux-x64-gnu', + 'linux-arm64': 'linux-arm64-gnu', + 'darwin-arm64': 'darwin-arm64', + 'darwin-x64': 'darwin-x64' + } + const suffix = suffixMap[`${process.platform}-${process.arch}`] + if (suffix == null) return void 0 + + try { + const _require = createRequire(import.meta.url) + const packageName = `@truenine/memory-sync-cli-${suffix}` + const binaryFile = `napi-memory-sync-cli.${suffix}.node` + const candidates = [ + packageName, + `${packageName}/${binaryFile}`, + `./${binaryFile}`, + `../npm/${suffix}`, + `../npm/${suffix}/${binaryFile}`, + `../../npm/${suffix}`, + `../../npm/${suffix}/${binaryFile}` + ] + + for (const specifier of candidates) { + try { + const loaded = _require(specifier) as unknown + const possibleBindings = [ + (loaded as {config?: unknown})?.config, + (loaded as {default?: {config?: unknown}})?.default?.config, + (loaded as {default?: unknown})?.default, + loaded + ] 
+ + for (const candidate of possibleBindings) { + if (candidate != null && typeof candidate === 'object') return candidate as T + } + } + catch {} + } + } + catch { + } + + return void 0 +} + +export function getNativeBinding(): T | undefined { + return tryLoadNativeBinding() +} diff --git a/cli/src/plugins/AgentsOutputPlugin.ts b/cli/src/plugins/AgentsOutputPlugin.ts index 268bb821..7aca7e77 100644 --- a/cli/src/plugins/AgentsOutputPlugin.ts +++ b/cli/src/plugins/AgentsOutputPlugin.ts @@ -1,4 +1,6 @@ import type { + OutputCleanContext, + OutputCleanupDeclarations, OutputFileDeclaration, OutputWriteContext } from './plugin-core' @@ -11,13 +13,6 @@ export class AgentsOutputPlugin extends AbstractOutputPlugin { super('AgentsOutputPlugin', { outputFileName: PROJECT_MEMORY_FILE, treatWorkspaceRootProjectAsProject: true, - cleanup: { - delete: { - project: { - files: [PROJECT_MEMORY_FILE] - } - } - }, capabilities: { prompt: { scopes: ['project'], @@ -27,6 +22,18 @@ export class AgentsOutputPlugin extends AbstractOutputPlugin { }) } + override async declareCleanupPaths(ctx: OutputCleanContext): Promise { + const declarations = await super.declareCleanupPaths(ctx) + + return { + ...declarations, + delete: [ + ...declarations.delete ?? 
[], + ...this.buildProjectPromptCleanupTargets(ctx, PROJECT_MEMORY_FILE) + ] + } + } + override async declareOutputFiles(ctx: OutputWriteContext): Promise { const results: OutputFileDeclaration[] = [] const promptProjects = this.getProjectPromptOutputProjects(ctx) diff --git a/cli/src/plugins/ClaudeCodeCLIOutputPlugin.ts b/cli/src/plugins/ClaudeCodeCLIOutputPlugin.ts index 1ff83185..7f012075 100644 --- a/cli/src/plugins/ClaudeCodeCLIOutputPlugin.ts +++ b/cli/src/plugins/ClaudeCodeCLIOutputPlugin.ts @@ -1,4 +1,4 @@ -import type {RulePrompt} from './plugin-core' +import type {OutputCleanContext, OutputCleanupDeclarations, RulePrompt} from './plugin-core' import {doubleQuoted} from '@truenine/md-compiler/markdown' import {AbstractOutputPlugin} from './plugin-core' @@ -44,7 +44,6 @@ export class ClaudeCodeCLIOutputPlugin extends AbstractOutputPlugin { cleanup: { delete: { project: { - files: [PROJECT_MEMORY_FILE], dirs: ['.claude/rules', '.claude/commands', '.claude/agents', '.claude/skills'] }, global: { @@ -81,4 +80,16 @@ export class ClaudeCodeCLIOutputPlugin extends AbstractOutputPlugin { } }) } + + override async declareCleanupPaths(ctx: OutputCleanContext): Promise { + const declarations = await super.declareCleanupPaths(ctx) + + return { + ...declarations, + delete: [ + ...declarations.delete ?? [], + ...this.buildProjectPromptCleanupTargets(ctx, PROJECT_MEMORY_FILE) + ] + } + } } diff --git a/cli/src/plugins/CodexCLIOutputPlugin.ts b/cli/src/plugins/CodexCLIOutputPlugin.ts index 61daf36c..e11d54f9 100644 --- a/cli/src/plugins/CodexCLIOutputPlugin.ts +++ b/cli/src/plugins/CodexCLIOutputPlugin.ts @@ -97,6 +97,12 @@ export class CodexCLIOutputPlugin extends AbstractOutputPlugin { super('CodexCLIOutputPlugin', CODEX_OUTPUT_OPTIONS) } + /** + * Project-scoped output still writes to the workspace project, but Codex also + * resolves user-installed skills from `~/.codex/skills/`. 
Cleanup therefore + * needs to prune that global skills directory as well, while preserving the + * built-in `.system/` subtree. + */ override async declareCleanupPaths(ctx: OutputCleanContext): Promise { const declarations = await super.declareCleanupPaths(ctx) diff --git a/cli/src/plugins/GeminiCLIOutputPlugin.ts b/cli/src/plugins/GeminiCLIOutputPlugin.ts index c71694e0..cdbcde1e 100644 --- a/cli/src/plugins/GeminiCLIOutputPlugin.ts +++ b/cli/src/plugins/GeminiCLIOutputPlugin.ts @@ -1,3 +1,4 @@ +import type {OutputCleanContext, OutputCleanupDeclarations} from './plugin-core' import {AbstractOutputPlugin} from './plugin-core' const PROJECT_MEMORY_FILE = 'GEMINI.md' @@ -11,9 +12,6 @@ export class GeminiCLIOutputPlugin extends AbstractOutputPlugin { treatWorkspaceRootProjectAsProject: true, cleanup: { delete: { - project: { - globs: [PROJECT_MEMORY_FILE] - }, global: { files: ['.gemini/GEMINI.md'] } @@ -27,4 +25,16 @@ export class GeminiCLIOutputPlugin extends AbstractOutputPlugin { } }) } + + override async declareCleanupPaths(ctx: OutputCleanContext): Promise { + const declarations = await super.declareCleanupPaths(ctx) + + return { + ...declarations, + delete: [ + ...declarations.delete ?? 
[], + ...this.buildProjectPromptCleanupTargets(ctx, PROJECT_MEMORY_FILE) + ] + } + } } diff --git a/cli/src/plugins/PromptMarkdownCleanup.test.ts b/cli/src/plugins/PromptMarkdownCleanup.test.ts new file mode 100644 index 00000000..dac280ea --- /dev/null +++ b/cli/src/plugins/PromptMarkdownCleanup.test.ts @@ -0,0 +1,207 @@ +import type {OutputCleanContext, OutputPlugin, ProjectChildrenMemoryPrompt, ProjectRootMemoryPrompt} from './plugin-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import glob from 'fast-glob' +import {describe, expect, it} from 'vitest' +import {collectDeletionTargets} from '../commands/CleanupUtils' +import {AgentsOutputPlugin} from './AgentsOutputPlugin' +import {ClaudeCodeCLIOutputPlugin} from './ClaudeCodeCLIOutputPlugin' +import {GeminiCLIOutputPlugin} from './GeminiCLIOutputPlugin' +import {FilePathKind, PromptKind} from './plugin-core' + +interface CleanupTestCase { + readonly name: string + readonly fileName: string + readonly createPlugin: () => OutputPlugin +} + +const TEST_CASES: readonly CleanupTestCase[] = [ + { + name: 'AgentsOutputPlugin', + fileName: 'AGENTS.md', + createPlugin: () => new AgentsOutputPlugin() + }, + { + name: 'ClaudeCodeCLIOutputPlugin', + fileName: 'CLAUDE.md', + createPlugin: () => new ClaudeCodeCLIOutputPlugin() + }, + { + name: 'GeminiCLIOutputPlugin', + fileName: 'GEMINI.md', + createPlugin: () => new GeminiCLIOutputPlugin() + } +] + +function createRootPrompt(content: string): ProjectRootMemoryPrompt { + return { + type: PromptKind.ProjectRootMemory, + content, + length: content.length, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Root, + path: '', + getDirectoryName: () => '' + }, + markdownContents: [] + } as ProjectRootMemoryPrompt +} + +function createChildPrompt( + workspaceDir: string, + projectName: string, + relativePath: string, + content: string +): ProjectChildrenMemoryPrompt { + return { + type: 
PromptKind.ProjectChildrenMemory, + content, + length: content.length, + filePathKind: FilePathKind.Relative, + markdownContents: [], + dir: { + pathKind: FilePathKind.Relative, + path: relativePath, + basePath: path.join(workspaceDir, projectName), + getDirectoryName: () => path.basename(relativePath), + getAbsolutePath: () => path.join(workspaceDir, projectName, relativePath) + }, + workingChildDirectoryPath: { + pathKind: FilePathKind.Relative, + path: relativePath, + basePath: path.join(workspaceDir, projectName), + getDirectoryName: () => path.basename(relativePath), + getAbsolutePath: () => path.join(workspaceDir, projectName, relativePath) + } + } as ProjectChildrenMemoryPrompt +} + +function createCleanContext(workspaceDir: string): OutputCleanContext { + return { + logger: { + trace: () => {}, + debug: () => {}, + info: () => {}, + warn: () => {}, + error: () => {}, + fatal: () => {} + }, + fs, + path, + glob, + dryRun: true, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceDir, + getDirectoryName: () => path.basename(workspaceDir), + getAbsolutePath: () => workspaceDir + }, + projects: [ + { + name: '__workspace__', + isWorkspaceRootProject: true, + rootMemoryPrompt: createRootPrompt('workspace root') + }, + { + name: 'aindex', + isPromptSourceProject: true, + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'aindex', + basePath: workspaceDir, + getDirectoryName: () => 'aindex', + getAbsolutePath: () => path.join(workspaceDir, 'aindex') + }, + rootMemoryPrompt: createRootPrompt('prompt-source root'), + childMemoryPrompts: [createChildPrompt(workspaceDir, 'aindex', 'commands', 'prompt-source child')] + }, + { + name: 'project-a', + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'project-a', + basePath: workspaceDir, + getDirectoryName: () => 'project-a', + getAbsolutePath: () => path.join(workspaceDir, 'project-a') + }, + rootMemoryPrompt: 
createRootPrompt('project root'), + childMemoryPrompts: [createChildPrompt(workspaceDir, 'project-a', 'commands', 'project child')] + } + ] + } + } + } as OutputCleanContext +} + +describe.each(TEST_CASES)('$name cleanup', ({fileName, createPlugin}) => { + it('cleans workspace and non-prompt project markdown outputs without touching prompt-source paths', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), `tnmsc-${fileName.toLowerCase()}-cleanup-`)) + const workspaceDir = path.join(tempDir, 'workspace') + const workspaceFile = path.join(workspaceDir, fileName) + const promptSourceRootFile = path.join(workspaceDir, 'aindex', fileName) + const promptSourceChildFile = path.join(workspaceDir, 'aindex', 'commands', fileName) + const projectRootFile = path.join(workspaceDir, 'project-a', fileName) + const projectChildFile = path.join(workspaceDir, 'project-a', 'commands', fileName) + const manualProjectChildFile = path.join(workspaceDir, 'project-a', 'docs', fileName) + + fs.mkdirSync(path.dirname(promptSourceChildFile), {recursive: true}) + fs.mkdirSync(path.dirname(manualProjectChildFile), {recursive: true}) + fs.mkdirSync(path.dirname(projectChildFile), {recursive: true}) + fs.writeFileSync(workspaceFile, '# workspace', 'utf8') + fs.writeFileSync(promptSourceRootFile, '# prompt-source root', 'utf8') + fs.writeFileSync(promptSourceChildFile, '# prompt-source child', 'utf8') + fs.writeFileSync(projectRootFile, '# project root', 'utf8') + fs.writeFileSync(projectChildFile, '# project child', 'utf8') + fs.writeFileSync(manualProjectChildFile, '# manual child', 'utf8') + + try { + const result = await collectDeletionTargets([createPlugin()], createCleanContext(workspaceDir)) + const normalizedFilesToDelete = result.filesToDelete.map(target => target.replaceAll('\\', '/')) + + expect(normalizedFilesToDelete).toEqual(expect.arrayContaining([ + workspaceFile.replaceAll('\\', '/'), + projectRootFile.replaceAll('\\', '/'), + projectChildFile.replaceAll('\\', 
'/') + ])) + expect(normalizedFilesToDelete).not.toContain(manualProjectChildFile.replaceAll('\\', '/')) + expect(normalizedFilesToDelete).not.toContain(promptSourceRootFile.replaceAll('\\', '/')) + expect(normalizedFilesToDelete).not.toContain(promptSourceChildFile.replaceAll('\\', '/')) + } + finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) +}) + +describe('claudeCodeCLIOutputPlugin cleanup', () => { + it('keeps project-scope .claude cleanup directories registered', async () => { + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-claude-cleanup-')) + const workspaceDir = path.join(tempDir, 'workspace') + const projectClaudeDirs = [ + path.join(workspaceDir, 'project-a', '.claude', 'rules'), + path.join(workspaceDir, 'project-a', '.claude', 'commands'), + path.join(workspaceDir, 'project-a', '.claude', 'agents'), + path.join(workspaceDir, 'project-a', '.claude', 'skills') + ] + + for (const directory of projectClaudeDirs) { + fs.mkdirSync(directory, {recursive: true}) + } + + try { + const result = await collectDeletionTargets([new ClaudeCodeCLIOutputPlugin()], createCleanContext(workspaceDir)) + const normalizedDirsToDelete = result.dirsToDelete.map(target => target.replaceAll('\\', '/')) + + expect(normalizedDirsToDelete).toEqual(expect.arrayContaining( + projectClaudeDirs.map(target => target.replaceAll('\\', '/')) + )) + } + finally { + fs.rmSync(tempDir, {recursive: true, force: true}) + } + }) +}) diff --git a/cli/src/plugins/plugin-core/AbstractOutputPlugin.ts b/cli/src/plugins/plugin-core/AbstractOutputPlugin.ts index 504d1e4c..652f928b 100644 --- a/cli/src/plugins/plugin-core/AbstractOutputPlugin.ts +++ b/cli/src/plugins/plugin-core/AbstractOutputPlugin.ts @@ -560,6 +560,47 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out return path.dirname(p) } + protected buildProjectPromptCleanupTargets( + ctx: OutputCleanContext, + fileName: string = this.outputFileName + ): readonly 
OutputCleanupPathDeclaration[] { + if (fileName.length === 0) return [] + + const declarations: OutputCleanupPathDeclaration[] = [] + const seenPaths = new Set() + + const pushCleanupFile = ( + targetPath: string, + label: string + ): void => { + if (seenPaths.has(targetPath)) return + seenPaths.add(targetPath) + declarations.push({ + path: targetPath, + kind: 'file', + scope: 'project', + label + }) + } + + for (const project of this.getProjectPromptOutputProjects(ctx)) { + const projectRootDir = this.resolveProjectRootDir(ctx, project) + if (projectRootDir == null) continue + + pushCleanupFile(this.resolvePath(projectRootDir, fileName), 'delete.project') + + if (project.childMemoryPrompts == null) continue + for (const child of project.childMemoryPrompts) { + pushCleanupFile( + this.resolveFullPath(child.dir, fileName), + 'delete.project.child' + ) + } + } + + return declarations + } + protected basename(p: string, ext?: string): string { return path.basename(p, ext) }