diff --git a/.github/actions/setup-node-pnpm/action.yml b/.github/actions/setup-node-pnpm/action.yml index d2ef2cbd..b5e18b9a 100644 --- a/.github/actions/setup-node-pnpm/action.yml +++ b/.github/actions/setup-node-pnpm/action.yml @@ -5,7 +5,11 @@ inputs: node-version: description: Node.js version required: false - default: "25" + default: "25.6.1" + pnpm-version: + description: pnpm version + required: false + default: "10.33.0" install: description: Whether to run pnpm install required: false @@ -16,6 +20,8 @@ runs: steps: - name: Setup pnpm uses: pnpm/action-setup@v4 + with: + version: ${{ inputs.pnpm-version }} - name: Setup Node uses: actions/setup-node@v6 diff --git a/.github/actions/setup-rust/action.yml b/.github/actions/setup-rust/action.yml index 048bf750..d1017840 100644 --- a/.github/actions/setup-rust/action.yml +++ b/.github/actions/setup-rust/action.yml @@ -1,26 +1,31 @@ name: Setup Rust -description: Install Rust stable toolchain with cargo cache +description: Install pinned Rust toolchain with cargo cache inputs: + rust-version: + description: Rust toolchain version + required: false + default: "1.88.0" targets: description: Additional Rust targets to install (comma-separated) required: false - default: '' + default: "" cache-key: description: Extra cache key suffix for cargo required: false - default: 'default' + default: "default" cache-paths: description: Additional paths to cache (newline-separated), appended to ~/.cargo/registry and ~/.cargo/git required: false - default: 'target' + default: "target" runs: using: composite steps: - - name: Install Rust stable + - name: Install Rust toolchain uses: dtolnay/rust-toolchain@stable with: + toolchain: ${{ inputs.rust-version }} targets: ${{ inputs.targets }} - name: Cache cargo @@ -30,7 +35,8 @@ runs: ~/.cargo/registry ~/.cargo/git ${{ inputs.cache-paths }} - key: ${{ runner.os }}-cargo-${{ inputs.cache-key }}-${{ hashFiles('Cargo.lock') }} + key: ${{ runner.os }}-cargo-${{ inputs.rust-version 
}}-${{ inputs.cache-key }}-${{ hashFiles('Cargo.lock') }} restore-keys: | - ${{ runner.os }}-cargo-${{ inputs.cache-key }}- + ${{ runner.os }}-cargo-${{ inputs.rust-version }}-${{ inputs.cache-key }}- + ${{ runner.os }}-cargo-${{ inputs.rust-version }}- ${{ runner.os }}-cargo- diff --git a/.github/actions/setup-tauri/action.yml b/.github/actions/setup-tauri/action.yml index 40c1f27b..7b8e678e 100644 --- a/.github/actions/setup-tauri/action.yml +++ b/.github/actions/setup-tauri/action.yml @@ -2,18 +2,22 @@ name: Setup Tauri Build description: Common Tauri GUI build setup - cargo cache with deps-only hash, version sync, route generation inputs: + rust-version: + description: Rust toolchain version + required: false + default: "1.93.1" rust-targets: description: Rust targets to install (comma-separated) required: false - default: '' + default: "" signing-private-key: description: Tauri updater signing private key content required: false - default: '' + default: "" signing-private-key-password: description: Tauri updater signing private key password required: false - default: '' + default: "" version: description: Version string to sync into Cargo.toml and tauri.conf.json required: true @@ -45,9 +49,10 @@ runs: patchelf \ libssl-dev - - name: Install Rust stable + - name: Install Rust toolchain uses: dtolnay/rust-toolchain@stable with: + toolchain: ${{ inputs.rust-version }} targets: ${{ inputs.rust-targets }} - name: Normalize Tauri signing key @@ -86,8 +91,9 @@ runs: ~/.cargo/registry ~/.cargo/git target - key: ${{ runner.os }}-cargo-${{ steps.cargo-deps-hash.outputs.hash }} + key: ${{ runner.os }}-cargo-${{ inputs.rust-version }}-${{ steps.cargo-deps-hash.outputs.hash }} restore-keys: | + ${{ runner.os }}-cargo-${{ inputs.rust-version }}- ${{ runner.os }}-cargo- - name: Sync Tauri version diff --git a/.github/workflows/release-cli.yml b/.github/workflows/release-cli.yml index ff380eed..5c262982 100644 --- a/.github/workflows/release-cli.yml +++ 
b/.github/workflows/release-cli.yml @@ -1,10 +1,10 @@ name: Release Packages env: - NODE_VERSION: '25' + NODE_VERSION: "25.6.1" NPM_REGISTRY_URL: https://registry.npmjs.org/ - NPM_PUBLISH_VERIFY_ATTEMPTS: '90' - NPM_PUBLISH_VERIFY_DELAY_SECONDS: '10' + NPM_PUBLISH_VERIFY_ATTEMPTS: "90" + NPM_PUBLISH_VERIFY_DELAY_SECONDS: "10" CLI_NATIVE_MODULE_DIRS: | libraries/logger libraries/md-compiler @@ -239,7 +239,7 @@ jobs: - uses: actions/checkout@v6 - uses: ./.github/actions/setup-node-pnpm with: - install: 'false' + install: "false" - name: Setup npm registry uses: actions/setup-node@v6 with: diff --git a/Cargo.lock b/Cargo.lock index 5d843bf3..601b13e0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -214,6 +214,15 @@ dependencies = [ "generic-array", ] +[[package]] +name = "block-buffer" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdd35008169921d80bc60d3d0ab416eecb028c4cd653352907921d95084790be" +dependencies = [ + "hybrid-array", +] + [[package]] name = "block2" version = "0.6.2" @@ -476,6 +485,12 @@ dependencies = [ "memchr", ] +[[package]] +name = "const-oid" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6ef517f0926dd24a1582492c791b6a4818a4d94e789a334894aa15b0d12f55c" + [[package]] name = "convert_case" version = "0.4.0" @@ -550,6 +565,15 @@ dependencies = [ "libc", ] +[[package]] +name = "cpufeatures" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b2a41393f66f16b0823bb79094d54ac5fbd34ab292ddafb9a0456ac9f87d201" +dependencies = [ + "libc", +] + [[package]] name = "crc32fast" version = "1.5.0" @@ -584,6 +608,15 @@ dependencies = [ "typenum", ] +[[package]] +name = "crypto-common" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77727bb15fa921304124b128af125e7e3b968275d1b108b379190264f4423710" +dependencies = [ + "hybrid-array", +] + [[package]] name = "cssparser" 
version = "0.29.6" @@ -623,9 +656,9 @@ dependencies = [ [[package]] name = "ctor" -version = "0.6.3" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "424e0138278faeb2b401f174ad17e715c829512d74f3d1e81eb43365c2e0590e" +checksum = "352d39c2f7bef1d6ad73db6f5160efcaed66d94ef8c6c573a8410c00bf909a98" dependencies = [ "ctor-proc-macro", "dtor", @@ -712,8 +745,19 @@ version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ - "block-buffer", - "crypto-common", + "block-buffer 0.10.4", + "crypto-common 0.1.7", +] + +[[package]] +name = "digest" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4850db49bf08e663084f7fb5c87d202ef91a3907271aff24a94eb97ff039153c" +dependencies = [ + "block-buffer 0.12.0", + "const-oid", + "crypto-common 0.2.1", ] [[package]] @@ -813,9 +857,9 @@ dependencies = [ [[package]] name = "dtor" -version = "0.1.1" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "404d02eeb088a82cfd873006cb713fe411306c7d182c344905e101fb1167d301" +checksum = "f1057d6c64987086ff8ed0fd3fbf377a6b7d205cc7715868cd401705f715cbe4" dependencies = [ "dtor-proc-macro", ] @@ -1518,6 +1562,15 @@ version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" +[[package]] +name = "hybrid-array" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8655f91cd07f2b9d0c24137bd650fe69617773435ee5ec83022377777ce65ef1" +dependencies = [ + "typenum", +] + [[package]] name = "hyper" version = "1.8.1" @@ -2094,7 +2147,7 @@ dependencies = [ [[package]] name = "memory-sync-gui" -version = "2026.10330.108" +version = "2026.10330.118" dependencies = [ "dirs", "proptest", @@ -2163,12 +2216,12 @@ 
dependencies = [ [[package]] name = "napi" -version = "3.8.3" +version = "3.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6944d0bf100571cd6e1a98a316cdca262deb6fccf8d93f5ae1502ca3fc88bd3" +checksum = "fb7848c221fb7bb789e02f01875287ebb1e078b92a6566a34de01ef8806e7c2b" dependencies = [ "bitflags 2.11.0", - "ctor 0.6.3", + "ctor 0.8.0", "futures", "napi-build", "napi-sys", @@ -2184,12 +2237,12 @@ checksum = "d376940fd5b723c6893cd1ee3f33abbfd86acb1cd1ec079f3ab04a2a3bc4d3b1" [[package]] name = "napi-derive" -version = "3.5.2" +version = "3.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c914b5e420182bfb73504e0607592cdb8e2e21437d450883077669fb72a114d" +checksum = "60867ff9a6f76e82350e0c3420cb0736f5866091b61d7d8a024baa54b0ec17dd" dependencies = [ "convert_case 0.11.0", - "ctor 0.6.3", + "ctor 0.8.0", "napi-derive-backend", "proc-macro2", "quote", @@ -2847,9 +2900,9 @@ dependencies = [ [[package]] name = "proptest" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37566cb3fdacef14c0737f9546df7cfeadbfbc9fef10991038bf5015d0c80532" +checksum = "4b45fcc2344c680f5025fe57779faef368840d0bd1f42f216291f0dc4ace4744" dependencies = [ "bit-set", "bit-vec", @@ -3648,8 +3701,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" dependencies = [ "cfg-if", - "cpufeatures", - "digest", + "cpufeatures 0.2.17", + "digest 0.10.7", +] + +[[package]] +name = "sha2" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "446ba717509524cb3f22f17ecc096f10f4822d76ab5c0b9822c5f9c284e825f4" +dependencies = [ + "cfg-if", + "cpufeatures 0.3.0", + "digest 0.11.2", ] [[package]] @@ -4055,7 +4119,7 @@ dependencies = [ "semver", "serde", "serde_json", - "sha2", + "sha2 0.10.9", "syn 2.0.117", "tauri-utils", "thiserror 2.0.18", @@ 
-4372,7 +4436,7 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tnmsc" -version = "2026.10330.108" +version = "2026.10330.118" dependencies = [ "clap", "dirs", @@ -4384,7 +4448,7 @@ dependencies = [ "reqwest", "serde", "serde_json", - "sha2", + "sha2 0.11.0", "tempfile", "thiserror 2.0.18", "tnmsc-logger", @@ -4394,7 +4458,7 @@ dependencies = [ [[package]] name = "tnmsc-logger" -version = "2026.10330.108" +version = "2026.10330.118" dependencies = [ "chrono", "napi", @@ -4406,7 +4470,7 @@ dependencies = [ [[package]] name = "tnmsc-md-compiler" -version = "2026.10330.108" +version = "2026.10330.118" dependencies = [ "markdown", "napi", @@ -4421,7 +4485,7 @@ dependencies = [ [[package]] name = "tnmsc-script-runtime" -version = "2026.10330.108" +version = "2026.10330.118" dependencies = [ "napi", "napi-build", @@ -5693,7 +5757,7 @@ dependencies = [ "once_cell", "percent-encoding", "raw-window-handle", - "sha2", + "sha2 0.10.9", "soup3", "tao-macros", "thiserror 2.0.18", diff --git a/Cargo.toml b/Cargo.toml index ab153449..6cc71d83 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -9,8 +9,9 @@ members = [ ] [workspace.package] -version = "2026.10330.108" +version = "2026.10330.118" edition = "2024" +rust-version = "1.88" license = "AGPL-3.0-only" authors = ["TrueNine"] repository = "https://github.com/TrueNine/memory-sync" @@ -35,7 +36,7 @@ dirs = "6.0.0" glob = "0.3.3" # Crypto & encoding -sha2 = "0.10" +sha2 = "0.11.0" base64 = "0.22.1" # HTTP @@ -45,8 +46,8 @@ reqwest = { version = "0.13.2", features = ["blocking", "json"] } markdown = "1.0.0" # NAPI-RS (Node.js native addon bindings) -napi = { version = "3.8.3", features = ["napi4"] } -napi-derive = "3.5.2" +napi = { version = "3.8.4", features = ["napi4"] } +napi-derive = "3.5.3" napi-build = "2.3.1" # Tauri diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 7e6ebbaf..fec9e6a2 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -3,6 +3,7 @@ name = "tnmsc" description 
= "Cross-AI-tool prompt synchronisation CLI" version.workspace = true edition.workspace = true +rust-version.workspace = true license.workspace = true authors.workspace = true repository.workspace = true @@ -33,11 +34,11 @@ sha2 = { workspace = true } napi = { workspace = true, optional = true } napi-derive = { workspace = true, optional = true } reqwest = { version = "0.13.2", default-features = false, features = ["blocking", "json", "rustls"] } -globset = "0.4.16" +globset = "0.4.18" walkdir = "2.5.0" [dev-dependencies] -proptest = "1.10.0" +proptest = "1.11.0" tempfile = "3.27.0" [build-dependencies] diff --git a/cli/npm/darwin-arm64/package.json b/cli/npm/darwin-arm64/package.json index 536295d6..e3cde4bc 100644 --- a/cli/npm/darwin-arm64/package.json +++ b/cli/npm/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-darwin-arm64", - "version": "2026.10330.108", + "version": "2026.10330.118", "os": [ "darwin" ], diff --git a/cli/npm/darwin-x64/package.json b/cli/npm/darwin-x64/package.json index a35474d3..da2cf2a0 100644 --- a/cli/npm/darwin-x64/package.json +++ b/cli/npm/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-darwin-x64", - "version": "2026.10330.108", + "version": "2026.10330.118", "os": [ "darwin" ], diff --git a/cli/npm/linux-arm64-gnu/package.json b/cli/npm/linux-arm64-gnu/package.json index e38b50cf..646ca5fc 100644 --- a/cli/npm/linux-arm64-gnu/package.json +++ b/cli/npm/linux-arm64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-linux-arm64-gnu", - "version": "2026.10330.108", + "version": "2026.10330.118", "os": [ "linux" ], diff --git a/cli/npm/linux-x64-gnu/package.json b/cli/npm/linux-x64-gnu/package.json index 60b108c1..1a584e6c 100644 --- a/cli/npm/linux-x64-gnu/package.json +++ b/cli/npm/linux-x64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-linux-x64-gnu", - "version": "2026.10330.108", + "version": "2026.10330.118", "os": [ "linux" ], 
diff --git a/cli/npm/win32-x64-msvc/package.json b/cli/npm/win32-x64-msvc/package.json index 6cd15973..abba1832 100644 --- a/cli/npm/win32-x64-msvc/package.json +++ b/cli/npm/win32-x64-msvc/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-win32-x64-msvc", - "version": "2026.10330.108", + "version": "2026.10330.118", "os": [ "win32" ], diff --git a/cli/package.json b/cli/package.json index 5f5c09ed..3f7f6a72 100644 --- a/cli/package.json +++ b/cli/package.json @@ -1,7 +1,7 @@ { "name": "@truenine/memory-sync-cli", "type": "module", - "version": "2026.10330.108", + "version": "2026.10330.118", "description": "TrueNine Memory Synchronization CLI", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/cli/src/aindex-config/AindexProjectConfig.ts b/cli/src/aindex-config/AindexProjectConfig.ts new file mode 100644 index 00000000..82ea42f5 --- /dev/null +++ b/cli/src/aindex-config/AindexProjectConfig.ts @@ -0,0 +1,29 @@ +/** + * Configuration for empty directory cleanup in aindex projects. + */ +export interface AindexEmptyDirCleanupConfig { + /** Git-style glob patterns to exclude from empty directory cleanup. */ + readonly exclude?: readonly string[] +} + +/** + * Project-level configuration for aindex. 
+ * This is loaded from aindex/aindex.config.ts + */ +export interface AindexProjectConfig { + readonly emptyDirCleanup?: AindexEmptyDirCleanupConfig +} + +export interface AindexProjectConfigLoadResult { + readonly config: AindexProjectConfig + readonly source: string | null + readonly found: boolean +} + +export const DEFAULT_EMPTY_DIR_CLEANUP_CONFIG: AindexEmptyDirCleanupConfig = { + exclude: [] +} + +export function defineAindexProjectConfig(config: AindexProjectConfig): AindexProjectConfig { + return config +} diff --git a/cli/src/aindex-config/AindexProjectConfigLoader.ts b/cli/src/aindex-config/AindexProjectConfigLoader.ts new file mode 100644 index 00000000..b77d388f --- /dev/null +++ b/cli/src/aindex-config/AindexProjectConfigLoader.ts @@ -0,0 +1,88 @@ +import type {ILogger} from '@truenine/logger' +import type {AindexProjectConfig, AindexProjectConfigLoadResult} from './AindexProjectConfig' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {createLogger} from '@truenine/logger' + +const CONFIG_FILE_NAMES = ['aindex.config.ts', 'aindex.config.mts', 'aindex.config.cts', 'aindex.config.js', 'aindex.config.mjs', 'aindex.config.cjs'] + +const DEFAULT_CONFIG: AindexProjectConfig = { + emptyDirCleanup: { + exclude: [] + } +} + +export class AindexProjectConfigLoader { + private readonly logger: ILogger + + constructor() { + this.logger = createLogger('AindexProjectConfigLoader') + } + + async loadFromDirectory(dirPath: string): Promise<AindexProjectConfigLoadResult> { + for (const configName of CONFIG_FILE_NAMES) { + const configPath = path.join(dirPath, configName) + if (fs.existsSync(configPath)) { + return this.loadFromFile(configPath) + } + } + return {config: DEFAULT_CONFIG, source: null, found: false} + } + + async loadFromFile(filePath: string): Promise<AindexProjectConfigLoadResult> { + try { + const resolvedPath = path.resolve(filePath) + + if (!fs.existsSync(resolvedPath)) { + return {config: DEFAULT_CONFIG, source: null, found: false} + } + + const mod = (await import(resolvedPath)) as 
Record<string, unknown> + const rawConfig = mod != null && typeof mod === 'object' ? 'default' in mod ? mod['default'] : 'config' in mod ? mod['config'] : mod : mod + + const config = this.normalizeConfig(rawConfig) + this.logger.debug('aindex project config loaded', {source: resolvedPath}) + return {config, source: resolvedPath, found: true} + } catch (error) { + this.logger.warn({ + code: 'AINDEX_CONFIG_LOAD_FAILED', + title: 'aindex project config load failed', + rootCause: [error instanceof Error ? error.message : String(error)], + details: {path: filePath} + }) + return {config: DEFAULT_CONFIG, source: null, found: false} + } + } + + private normalizeConfig(raw: unknown): AindexProjectConfig { + if (raw == null || typeof raw !== 'object') return DEFAULT_CONFIG + const obj = raw as Record<string, unknown> + + const edc = obj['emptyDirCleanup'] + if (edc != null && typeof edc !== 'object') return {} + + const edcObj = edc as Record<string, unknown> + return { + emptyDirCleanup: { + exclude: toStringArray(edcObj['exclude']) + } + } + } +} + +function toStringArray(val: unknown): string[] { + if (Array.isArray(val)) return val.filter((x): x is string => typeof x === 'string') + if (typeof val === 'string') return [val] + return [] +} + +let defaultLoader: AindexProjectConfigLoader | null = null + +export function getAindexProjectConfigLoader(): AindexProjectConfigLoader { + defaultLoader ??= new AindexProjectConfigLoader() + return defaultLoader +} + +export async function loadAindexProjectConfig(dirPath: string): Promise<AindexProjectConfigLoadResult> { + return getAindexProjectConfigLoader().loadFromDirectory(dirPath) +} diff --git a/cli/src/aindex-config/index.ts b/cli/src/aindex-config/index.ts new file mode 100644 index 00000000..9489c4fc --- /dev/null +++ b/cli/src/aindex-config/index.ts @@ -0,0 +1,2 @@ +export * from './AindexProjectConfig' +export * from './AindexProjectConfigLoader' diff --git a/cli/src/bridge/node.rs b/cli/src/bridge/node.rs index 5a4dee04..01e9e7c0 100644 --- a/cli/src/bridge/node.rs +++ b/cli/src/bridge/node.rs @@ 
-81,12 +81,12 @@ fn detect_plugin_runtime() -> Option<PathBuf> { let mut candidates: Vec<PathBuf> = Vec::new(); // Relative to binary location - if let Ok(exe) = std::env::current_exe() { - if let Some(exe_dir) = exe.parent() { - candidates.push(exe_dir.join("plugin-runtime.mjs")); - candidates.push(exe_dir.join("../dist/plugin-runtime.mjs")); - candidates.push(exe_dir.join("../cli/dist/plugin-runtime.mjs")); - } + if let Ok(exe) = std::env::current_exe() + && let Some(exe_dir) = exe.parent() + { + candidates.push(exe_dir.join("plugin-runtime.mjs")); + candidates.push(exe_dir.join("../dist/plugin-runtime.mjs")); + candidates.push(exe_dir.join("../cli/dist/plugin-runtime.mjs")); } // Relative to CWD @@ -181,15 +181,15 @@ fn run_silent(cmd: &str, args: &[&str]) -> Option { #[cfg(feature = "embedded-runtime")] const EMBEDDED_RUNTIME: &str = include_str!(concat!(env!("OUT_DIR"), "/plugin-runtime.mjs")); +/// Extract embedded JS to `~/.aindex/.cache/plugin-runtime-<version>.mjs`. #[cfg(not(feature = "embedded-runtime"))] -const EMBEDDED_RUNTIME: &str = ""; +fn extract_embedded_runtime() -> Option<PathBuf> { + None +} /// Extract embedded JS to `~/.aindex/.cache/plugin-runtime-<version>.mjs`. 
+#[cfg(feature = "embedded-runtime")] fn extract_embedded_runtime() -> Option<PathBuf> { - if EMBEDDED_RUNTIME.is_empty() { - return None; - } - let version = env!("CARGO_PKG_VERSION"); let cache_dir = dirs::home_dir()?.join(".aindex/.cache"); let cache_file = cache_dir.join(format!("plugin-runtime-{version}.mjs")); @@ -469,12 +469,12 @@ pub fn run_node_fallback(args: &[String]) -> ExitCode { fn find_index_mjs() -> Option<PathBuf> { let candidates: Vec<PathBuf> = { let mut c = Vec::new(); - if let Ok(exe) = std::env::current_exe() { - if let Some(exe_dir) = exe.parent() { - c.push(exe_dir.join("index.mjs")); - c.push(exe_dir.join("../dist/index.mjs")); - c.push(exe_dir.join("../cli/dist/index.mjs")); - } + if let Ok(exe) = std::env::current_exe() + && let Some(exe_dir) = exe.parent() + { + c.push(exe_dir.join("index.mjs")); + c.push(exe_dir.join("../dist/index.mjs")); + c.push(exe_dir.join("../cli/dist/index.mjs")); } if let Ok(cwd) = std::env::current_dir() { c.push(cwd.join("dist/index.mjs")); diff --git a/cli/src/cleanup/empty-directories.ts b/cli/src/cleanup/empty-directories.ts index 1491ba9b..5ea8a881 100644 --- a/cli/src/cleanup/empty-directories.ts +++ b/cli/src/cleanup/empty-directories.ts @@ -16,7 +16,8 @@ const EMPTY_DIRECTORY_SCAN_EXCLUDED_BASENAMES = new Set([ '.pnpm-store', '.yarn', '.idea', - '.vscode' + '.volumes', + 'volumes' ]) export interface WorkspaceEmptyDirectoryPlan { @@ -37,7 +38,9 @@ function shouldSkipEmptyDirectoryTree( currentDir: string ): boolean { if (currentDir === workspaceDir) return false - return EMPTY_DIRECTORY_SCAN_EXCLUDED_BASENAMES.has(nodePath.basename(currentDir)) + return EMPTY_DIRECTORY_SCAN_EXCLUDED_BASENAMES.has( + nodePath.basename(currentDir) + ) } export function planWorkspaceEmptyDirectoryCleanup( @@ -48,27 +51,34 @@ export function planWorkspaceEmptyDirectoryCleanup( const dirsToDelete = new Set(options.dirsToDelete.map(resolveAbsolutePath)) const emptyDirsToDelete = new Set<string>() + // Track which directories are scheduled for deletion 
(dirsToDelete + emptyDirsToDelete) + const isScheduledForDeletion = (dirPath: string): boolean => dirsToDelete.has(dirPath) || emptyDirsToDelete.has(dirPath) + const collectEmptyDirectories = (currentDir: string): boolean => { - if (dirsToDelete.has(currentDir)) return true - if (shouldSkipEmptyDirectoryTree(options.path, workspaceDir, currentDir)) return false + if (isScheduledForDeletion(currentDir)) return true + if (shouldSkipEmptyDirectoryTree(options.path, workspaceDir, currentDir)) + { return false } let entries: fs.Dirent[] try { entries = options.fs.readdirSync(currentDir, {withFileTypes: true}) - } - catch { + } catch { return false } let hasRetainedEntries = false for (const entry of entries) { - const entryPath = resolveAbsolutePath(options.path.join(currentDir, entry.name)) + const entryPath = resolveAbsolutePath( + options.path.join(currentDir, entry.name) + ) - if (dirsToDelete.has(entryPath)) continue + if (isScheduledForDeletion(entryPath)) continue if (entry.isDirectory()) { - if (shouldSkipEmptyDirectoryTree(options.path, workspaceDir, entryPath)) { + if ( + shouldSkipEmptyDirectoryTree(options.path, workspaceDir, entryPath) + ) { hasRetainedEntries = true continue } @@ -89,9 +99,16 @@ export function planWorkspaceEmptyDirectoryCleanup( return !hasRetainedEntries } - collectEmptyDirectories(workspaceDir) + // Iteratively collect empty directories until no new ones are found + // This handles the case where deleting a child directory makes its parent empty + let previousSize = -1 + while (emptyDirsToDelete.size !== previousSize) { + previousSize = emptyDirsToDelete.size + collectEmptyDirectories(workspaceDir) + } return { - emptyDirsToDelete: [...emptyDirsToDelete].sort((a, b) => a.localeCompare(b)) + emptyDirsToDelete: [...emptyDirsToDelete].sort((a, b) => + a.localeCompare(b)) } } diff --git a/cli/src/cli.rs b/cli/src/cli.rs index 4b7f63ac..ab9c5815 100644 --- a/cli/src/cli.rs +++ b/cli/src/cli.rs @@ -175,18 +175,18 @@ fn 
parse_key_value_pairs(args: &ConfigArgs) -> Vec<(String, String)> { let mut pairs = Vec::new(); for s in &args.set { - if let Some(eq_idx) = s.find('=') { - if eq_idx > 0 { - pairs.push((s[..eq_idx].to_string(), s[eq_idx + 1..].to_string())); - } + if let Some(eq_idx) = s.find('=') + && eq_idx > 0 + { + pairs.push((s[..eq_idx].to_string(), s[eq_idx + 1..].to_string())); } } for s in &args.positional { - if let Some(eq_idx) = s.find('=') { - if eq_idx > 0 { - pairs.push((s[..eq_idx].to_string(), s[eq_idx + 1..].to_string())); - } + if let Some(eq_idx) = s.find('=') + && eq_idx > 0 + { + pairs.push((s[..eq_idx].to_string(), s[eq_idx + 1..].to_string())); } } diff --git a/cli/src/commands/CleanupUtils.adapter.test.ts b/cli/src/commands/CleanupUtils.adapter.test.ts index 2976dcca..069ea3ab 100644 --- a/cli/src/commands/CleanupUtils.adapter.test.ts +++ b/cli/src/commands/CleanupUtils.adapter.test.ts @@ -45,15 +45,17 @@ function createCleanContext(workspaceDir: string): OutputCleanContext { getDirectoryName: () => path.basename(workspaceDir), getAbsolutePath: () => workspaceDir }, - projects: [{ - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: 'project-a', - basePath: workspaceDir, - getDirectoryName: () => 'project-a', - getAbsolutePath: () => path.join(workspaceDir, 'project-a') + projects: [ + { + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'project-a', + basePath: workspaceDir, + getDirectoryName: () => 'project-a', + getAbsolutePath: () => path.join(workspaceDir, 'project-a') + } } - }] + ] }, aindexDir: path.join(workspaceDir, 'aindex') } @@ -86,25 +88,29 @@ describe('cleanupUtils native adapter', () => { nativeBindingMocks.planCleanup.mockReset() nativeBindingMocks.performCleanup.mockReset() - nativeBindingMocks.planCleanup.mockReturnValue(JSON.stringify({ - filesToDelete: ['/tmp/project-a/AGENTS.md'], - dirsToDelete: ['/tmp/.codex/skills/legacy'], - emptyDirsToDelete: ['/tmp/.codex/skills'], - violations: [], - 
conflicts: [], - excludedScanGlobs: ['**/.git/**'] - })) - nativeBindingMocks.performCleanup.mockReturnValue(JSON.stringify({ - deletedFiles: 1, - deletedDirs: 2, - errors: [], - violations: [], - conflicts: [], - filesToDelete: ['/tmp/project-a/AGENTS.md'], - dirsToDelete: ['/tmp/.codex/skills/legacy'], - emptyDirsToDelete: ['/tmp/.codex/skills'], - excludedScanGlobs: ['**/.git/**'] - })) + nativeBindingMocks.planCleanup.mockReturnValue( + JSON.stringify({ + filesToDelete: ['/tmp/project-a/AGENTS.md'], + dirsToDelete: ['/tmp/.codex/skills/legacy'], + emptyDirsToDelete: ['/tmp/.codex/skills'], + violations: [], + conflicts: [], + excludedScanGlobs: ['**/.git/**'] + }) + ) + nativeBindingMocks.performCleanup.mockReturnValue( + JSON.stringify({ + deletedFiles: 1, + deletedDirs: 2, + errors: [], + violations: [], + conflicts: [], + filesToDelete: ['/tmp/project-a/AGENTS.md'], + dirsToDelete: ['/tmp/.codex/skills/legacy'], + emptyDirsToDelete: ['/tmp/.codex/skills'], + excludedScanGlobs: ['**/.git/**'] + }) + ) const {collectDeletionTargets, hasNativeCleanupBinding, performCleanup} = await cleanupModulePromise const workspaceDir = path.resolve('tmp-native-cleanup-adapter') @@ -116,7 +122,8 @@ describe('cleanupUtils native adapter', () => { const plan = await collectDeletionTargets([plugin], cleanCtx) expect(plan).toEqual({ filesToDelete: ['/tmp/project-a/AGENTS.md'], - dirsToDelete: ['/tmp/.codex/skills', '/tmp/.codex/skills/legacy'], + dirsToDelete: ['/tmp/.codex/skills/legacy'], + emptyDirsToDelete: ['/tmp/.codex/skills'], violations: [], conflicts: [], excludedScanGlobs: ['**/.git/**'] @@ -139,7 +146,7 @@ describe('cleanupUtils native adapter', () => { const result = await performCleanup([plugin], cleanCtx, createMockLogger()) expect(result).toEqual({ deletedFiles: 1, - deletedDirs: 2, + deletedDirs: 3, errors: [], violations: [], conflicts: [] diff --git a/cli/src/commands/CleanupUtils.test.ts b/cli/src/commands/CleanupUtils.test.ts index 5f26e97a..9d4f9f62 100644 
--- a/cli/src/commands/CleanupUtils.test.ts +++ b/cli/src/commands/CleanupUtils.test.ts @@ -5,11 +5,7 @@ import * as path from 'node:path' import glob from 'fast-glob' import {describe, expect, it} from 'vitest' import {mergeConfig} from '../config' -import { - FilePathKind, - IDEKind, - PluginKind -} from '../plugins/plugin-core' +import {FilePathKind, IDEKind, PluginKind} from '../plugins/plugin-core' import {collectDeletionTargets, performCleanup} from './CleanupUtils' function createMockLogger(): ILogger { @@ -91,22 +87,26 @@ describe('collectDeletionTargets', () => { const ignoreSource = path.resolve('tmp-aindex/public/.cursorignore') const ctx = createCleanContext({ - editorConfigFiles: [{ - type: IDEKind.EditorConfig, - content: 'root = true', - length: 11, - filePathKind: FilePathKind.Absolute, - dir: { - pathKind: FilePathKind.Absolute, - path: editorSource, - getDirectoryName: () => '.editorconfig' + editorConfigFiles: [ + { + type: IDEKind.EditorConfig, + content: 'root = true', + length: 11, + filePathKind: FilePathKind.Absolute, + dir: { + pathKind: FilePathKind.Absolute, + path: editorSource, + getDirectoryName: () => '.editorconfig' + } + } + ], + aiAgentIgnoreConfigFiles: [ + { + fileName: '.cursorignore', + content: 'node_modules', + sourcePath: ignoreSource } - }], - aiAgentIgnoreConfigFiles: [{ - fileName: '.cursorignore', - content: 'node_modules', - sourcePath: ignoreSource - }] + ] }) const plugin = createMockOutputPlugin('MockOutputPlugin', [editorSource, ignoreSource]) @@ -140,17 +140,13 @@ describe('collectDeletionTargets', () => { const ruleDir = path.join(claudeBaseDir, 'rules') const ruleFile = path.join(ruleDir, 'a.md') const ctx = createCleanContext() - const plugin = createMockOutputPlugin( - 'MockOutputPlugin', - [ruleFile], - { - delete: [ - {kind: 'directory', path: claudeBaseDir}, - {kind: 'directory', path: ruleDir}, - {kind: 'file', path: ruleFile} - ] - } - ) + const plugin = createMockOutputPlugin('MockOutputPlugin', 
[ruleFile], { + delete: [ + {kind: 'directory', path: claudeBaseDir}, + {kind: 'directory', path: ruleDir}, + {kind: 'file', path: ruleFile} + ] + }) const result = await collectDeletionTargets([plugin], ctx) @@ -163,19 +159,13 @@ describe('collectDeletionTargets', () => { const promptsDir = path.join(codexBaseDir, 'prompts') const protectedSystemDir = path.join(codexBaseDir, 'skills', '.system') const ctx = createCleanContext() - const plugin = createMockOutputPlugin( - 'MockOutputPlugin', - [], - { - delete: [ - {kind: 'directory', path: codexBaseDir}, - {kind: 'directory', path: promptsDir} - ], - protect: [ - {kind: 'directory', path: protectedSystemDir} - ] - } - ) + const plugin = createMockOutputPlugin('MockOutputPlugin', [], { + delete: [ + {kind: 'directory', path: codexBaseDir}, + {kind: 'directory', path: promptsDir} + ], + protect: [{kind: 'directory', path: protectedSystemDir}] + }) const result = await collectDeletionTargets([plugin], ctx) @@ -186,23 +176,21 @@ describe('collectDeletionTargets', () => { it('blocks deleting dangerous roots and returns the most specific matching rule', async () => { const homeDir = os.homedir() const ctx = createCleanContext() - const plugin = createMockOutputPlugin( - 'MockOutputPlugin', - [], - { - delete: [{kind: 'directory', path: homeDir}] - } - ) + const plugin = createMockOutputPlugin('MockOutputPlugin', [], { + delete: [{kind: 'directory', path: homeDir}] + }) const result = await collectDeletionTargets([plugin], ctx) expect(result.dirsToDelete).toEqual([]) expect(result.filesToDelete).toEqual([]) - expect(result.violations).toEqual([expect.objectContaining({ - targetPath: path.resolve(homeDir), - protectedPath: path.resolve('tmp-cleanup-utils-workspace', 'knowladge'), - protectionMode: 'direct' - })]) + expect(result.violations).toEqual([ + expect.objectContaining({ + targetPath: path.resolve(homeDir), + protectedPath: path.resolve('tmp-cleanup-utils-workspace', 'knowladge'), + protectionMode: 'direct' + }) + 
]) }) it('throws when an output path matches a built-in protected path before directory guards run', async () => { @@ -219,32 +207,32 @@ describe('collectDeletionTargets', () => { getDirectoryName: () => path.basename(workspaceDir), getAbsolutePath: () => workspaceDir }, - projects: [{ - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: 'project-a', - basePath: workspaceDir, - getDirectoryName: () => 'project-a', - getAbsolutePath: () => projectRoot + projects: [ + { + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'project-a', + basePath: workspaceDir, + getDirectoryName: () => 'project-a', + getAbsolutePath: () => projectRoot + } } - }] + ] }, aindexDir }) - const plugin = createMockOutputPlugin( - 'MockOutputPlugin', - [globalConfigPath], - { - delete: [ - {kind: 'directory', path: globalAindexDir}, - {kind: 'directory', path: workspaceDir}, - {kind: 'directory', path: projectRoot}, - {kind: 'directory', path: aindexDir} - ] - } - ) + const plugin = createMockOutputPlugin('MockOutputPlugin', [globalConfigPath], { + delete: [ + {kind: 'directory', path: globalAindexDir}, + {kind: 'directory', path: workspaceDir}, + {kind: 'directory', path: projectRoot}, + {kind: 'directory', path: aindexDir} + ] + }) - await expect(collectDeletionTargets([plugin], ctx)).rejects.toThrow(`Cleanup protection conflict: 1 output path(s) are also protected: ${path.resolve(globalConfigPath)}`) + await expect(collectDeletionTargets([plugin], ctx)).rejects.toThrow( + `Cleanup protection conflict: 1 output path(s) are also protected: ${path.resolve(globalConfigPath)}` + ) }) it('allows deleting non-mdx files under dist while blocking reserved dist mdx files', async () => { @@ -273,22 +261,21 @@ describe('collectDeletionTargets', () => { getDirectoryName: () => path.basename(workspaceDir), getAbsolutePath: () => workspaceDir }, - projects: [{ - dirFromWorkspacePath: { - pathKind: FilePathKind.Relative, - path: 'project-a', - basePath: workspaceDir, - 
getDirectoryName: () => 'project-a', - getAbsolutePath: () => path.join(workspaceDir, 'project-a') + projects: [ + { + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'project-a', + basePath: workspaceDir, + getDirectoryName: () => 'project-a', + getAbsolutePath: () => path.join(workspaceDir, 'project-a') + } } - }] + ] }, aindexDir: path.join(workspaceDir, 'aindex') }) - const plugin = createMockOutputPlugin('MockOutputPlugin', [ - projectChildFile, - safeDistMarkdownFile - ], { + const plugin = createMockOutputPlugin('MockOutputPlugin', [projectChildFile, safeDistMarkdownFile], { delete: [ {kind: 'file', path: protectedDistMdxFile}, {kind: 'directory', path: globalChildDir}, @@ -298,25 +285,20 @@ describe('collectDeletionTargets', () => { const result = await collectDeletionTargets([plugin], ctx) - expect(new Set(result.filesToDelete)).toEqual(new Set([ - path.resolve(projectChildFile), - path.resolve(safeDistMarkdownFile) - ])) - expect(new Set(result.dirsToDelete)).toEqual(new Set([ - path.resolve(globalChildDir), - path.resolve(aindexSourceDir), - path.resolve(workspaceDir, 'project-a') - ])) - expect(result.violations).toEqual(expect.arrayContaining([ - expect.objectContaining({ - targetPath: path.resolve(protectedDistMdxFile), - protectionMode: 'direct', - protectedPath: path.resolve(protectedDistMdxFile) - }), - expect.objectContaining({targetPath: path.resolve(aindexSourceDir)}) - ])) - } - finally { + expect(new Set(result.filesToDelete)).toEqual(new Set([path.resolve(projectChildFile), path.resolve(safeDistMarkdownFile)])) + const allDirsToDelete = [...result.dirsToDelete, ...result.emptyDirsToDelete] + expect(new Set(allDirsToDelete)).toEqual(new Set([path.resolve(globalChildDir), path.resolve(aindexSourceDir), path.resolve(workspaceDir, 'project-a')])) + expect(result.violations).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + targetPath: path.resolve(protectedDistMdxFile), + protectionMode: 'direct', + 
protectedPath: path.resolve(protectedDistMdxFile) + }), + expect.objectContaining({targetPath: path.resolve(aindexSourceDir)}) + ]) + ) + } finally { fs.rmSync(tempDir, {recursive: true, force: true}) } }) @@ -351,13 +333,14 @@ describe('collectDeletionTargets', () => { expect(result.dirsToDelete).toEqual([]) expect(result.filesToDelete).toEqual([]) - expect(result.violations).toEqual([expect.objectContaining({ - targetPath: path.resolve(distCommandDir), - protectionMode: 'direct', - protectedPath: path.resolve(protectedDistMdxFile) - })]) - } - finally { + expect(result.violations).toEqual([ + expect.objectContaining({ + targetPath: path.resolve(distCommandDir), + protectionMode: 'direct', + protectedPath: path.resolve(protectedDistMdxFile) + }) + ]) + } finally { fs.rmSync(tempDir, {recursive: true, force: true}) } }) @@ -393,16 +376,17 @@ describe('collectDeletionTargets', () => { const result = await collectDeletionTargets([plugin], ctx) expect(result.filesToDelete).toEqual([]) - expect(result.violations).toEqual(expect.arrayContaining([ - expect.objectContaining({ - targetPath: path.resolve(protectedAppMdxFile), - protectionMode: 'direct', - protectedPath: path.resolve(protectedAppMdxFile) - }), - expect.objectContaining({targetPath: path.resolve(safeAppMarkdownFile)}) - ])) - } - finally { + expect(result.violations).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + targetPath: path.resolve(protectedAppMdxFile), + protectionMode: 'direct', + protectedPath: path.resolve(protectedAppMdxFile) + }), + expect.objectContaining({targetPath: path.resolve(safeAppMarkdownFile)}) + ]) + ) + } finally { fs.rmSync(tempDir, {recursive: true, force: true}) } }) @@ -447,13 +431,14 @@ describe('collectDeletionTargets', () => { expect(result.dirsToDelete).toEqual([]) expect(result.filesToDelete).toEqual([]) - expect(result.violations).toEqual([expect.objectContaining({ - targetPath: path.resolve(path.join(workspaceDir, 'aindex', 'app')), - protectionMode: 
'direct', - protectedPath: path.resolve(protectedAppMdxFile) - })]) - } - finally { + expect(result.violations).toEqual([ + expect.objectContaining({ + targetPath: path.resolve(path.join(workspaceDir, 'aindex', 'app')), + protectionMode: 'direct', + protectedPath: path.resolve(protectedAppMdxFile) + }) + ]) + } finally { fs.rmSync(tempDir, {recursive: true, force: true}) } }) @@ -487,13 +472,14 @@ describe('collectDeletionTargets', () => { const result = await collectDeletionTargets([plugin], ctx) expect(result.dirsToDelete).toEqual([]) - expect(result.violations).toEqual([expect.objectContaining({ - targetPath: path.resolve(symlinkPath), - protectedPath: path.resolve(path.join(workspaceDir, 'knowladge')), - protectionMode: 'direct' - })]) - } - finally { + expect(result.violations).toEqual([ + expect.objectContaining({ + targetPath: path.resolve(symlinkPath), + protectedPath: path.resolve(path.join(workspaceDir, 'knowladge')), + protectionMode: 'direct' + }) + ]) + } finally { fs.rmSync(tempDir, {recursive: true, force: true}) } }) @@ -525,11 +511,13 @@ describe('collectDeletionTargets', () => { const result = await collectDeletionTargets([plugin], ctx) expect(result.filesToDelete).toEqual([path.resolve(directChildFile)]) - expect(result.violations).toEqual([expect.objectContaining({ - targetPath: path.resolve(recursiveChildFile), - protectionMode: 'recursive', - protectedPath: path.resolve(recursiveProtectedDir) - })]) + expect(result.violations).toEqual([ + expect.objectContaining({ + targetPath: path.resolve(recursiveChildFile), + protectionMode: 'recursive', + protectedPath: path.resolve(recursiveProtectedDir) + }) + ]) }) it('skips delete glob matches covered by excludeScanGlobs while still deleting other sibling directories', async () => { @@ -556,8 +544,7 @@ describe('collectDeletionTargets', () => { expect(result.dirsToDelete).toEqual([path.resolve(staleDir)]) expect(result.filesToDelete).toEqual([]) expect(result.violations).toEqual([]) - } - finally { + 
} finally { fs.rmSync(tempDir, {recursive: true, force: true}) } }) @@ -599,8 +586,7 @@ describe('collectDeletionTargets', () => { const plugin = createMockOutputPlugin('MockOutputPlugin', [workspacePromptSource]) await expect(collectDeletionTargets([plugin], ctx)).rejects.toThrow('Cleanup protection conflict') - } - finally { + } finally { fs.rmSync(tempDir, {recursive: true, force: true}) } }) @@ -646,17 +632,14 @@ describe('collectDeletionTargets', () => { const result = await collectDeletionTargets([plugin], ctx) expect(result.filesToDelete).toEqual([]) - expect(result.dirsToDelete).toEqual([ - path.resolve(workspaceDir, 'source', 'empty'), - path.resolve(sourceLeafDir) - ]) - expect(result.dirsToDelete).not.toContain(path.resolve(workspaceDir)) - expect(result.dirsToDelete).not.toContain(path.resolve(distEmptyDir)) - expect(result.dirsToDelete).not.toContain(path.resolve(nodeModulesEmptyDir)) - expect(result.dirsToDelete).not.toContain(path.resolve(gitEmptyDir)) - expect(result.dirsToDelete).not.toContain(path.resolve(symlinkParentDir)) - } - finally { + expect(result.dirsToDelete).toEqual([]) + expect(result.emptyDirsToDelete).toEqual([path.resolve(workspaceDir, 'source', 'empty'), path.resolve(sourceLeafDir)]) + expect(result.emptyDirsToDelete).not.toContain(path.resolve(workspaceDir)) + expect(result.emptyDirsToDelete).not.toContain(path.resolve(distEmptyDir)) + expect(result.emptyDirsToDelete).not.toContain(path.resolve(nodeModulesEmptyDir)) + expect(result.emptyDirsToDelete).not.toContain(path.resolve(gitEmptyDir)) + expect(result.emptyDirsToDelete).not.toContain(path.resolve(symlinkParentDir)) + } finally { fs.rmSync(tempDir, {recursive: true, force: true}) } }) @@ -692,19 +675,20 @@ describe('performCleanup', () => { const result = await performCleanup([plugin], ctx, createMockLogger()) - expect(result).toEqual(expect.objectContaining({ - deletedFiles: 1, - deletedDirs: 3, - errors: [], - violations: [], - conflicts: [] - })) + expect(result).toEqual( + 
expect.objectContaining({ + deletedFiles: 1, + deletedDirs: 3, + errors: [], + violations: [], + conflicts: [] + }) + ) expect(fs.existsSync(outputFile)).toBe(false) expect(fs.existsSync(outputDir)).toBe(false) expect(fs.existsSync(path.dirname(outputFile))).toBe(false) expect(fs.existsSync(path.dirname(outputDir))).toBe(false) - } - finally { + } finally { fs.rmSync(tempDir, {recursive: true, force: true}) } }) @@ -739,15 +723,12 @@ describe('performCleanup', () => { await performCleanup([plugin], ctx, logger) - expect(logger.debugMessages).toEqual(expect.arrayContaining([ - 'cleanup plan built', - 'cleanup delete execution started', - 'cleanup delete execution complete' - ])) + expect(logger.debugMessages).toEqual( + expect.arrayContaining(['cleanup plan built', 'cleanup delete execution started', 'cleanup delete execution complete']) + ) expect(logger.debugMessages).not.toContainEqual(expect.objectContaining({path: outputFile})) expect(logger.debugMessages).not.toContainEqual(expect.objectContaining({path: outputDir})) - } - finally { + } finally { fs.rmSync(tempDir, {recursive: true, force: true}) } }) @@ -780,20 +761,21 @@ describe('performCleanup', () => { const result = await performCleanup([plugin], ctx, createMockLogger()) - expect(result).toEqual(expect.objectContaining({ - deletedFiles: 1, - deletedDirs: 3, - errors: [], - violations: [], - conflicts: [] - })) + expect(result).toEqual( + expect.objectContaining({ + deletedFiles: 1, + deletedDirs: 3, + errors: [], + violations: [], + conflicts: [] + }) + ) expect(fs.existsSync(outputFile)).toBe(false) expect(fs.existsSync(path.dirname(outputFile))).toBe(false) expect(fs.existsSync(path.join(tempDir, 'scratch', 'empty', 'leaf'))).toBe(false) expect(fs.existsSync(path.join(tempDir, 'scratch', 'empty'))).toBe(false) expect(fs.existsSync(path.join(tempDir, 'scratch'))).toBe(true) - } - finally { + } finally { fs.rmSync(tempDir, {recursive: true, force: true}) } }) diff --git a/cli/src/commands/CleanupUtils.ts 
b/cli/src/commands/CleanupUtils.ts index 5edd3651..9c80a82c 100644 --- a/cli/src/commands/CleanupUtils.ts +++ b/cli/src/commands/CleanupUtils.ts @@ -9,6 +9,7 @@ import type { } from '../plugins/plugin-core' import type {ProtectionMode, ProtectionRuleMatcher} from '../ProtectedDeletionGuard' import {buildDiagnostic, buildFileOperationDiagnostic, diagnosticLines} from '@/diagnostics' +import {loadAindexProjectConfig} from '../aindex-config/AindexProjectConfigLoader' import {getNativeBinding} from '../core/native-binding' import {collectAllPluginOutputs} from '../plugins/plugin-core' import { @@ -99,6 +100,7 @@ interface NativeCleanupSnapshot { readonly projectRoots: readonly string[] readonly protectedRules: readonly NativeProtectedRule[] readonly pluginSnapshots: readonly NativePluginCleanupSnapshot[] + readonly emptyDirExcludeGlobs?: readonly string[] } interface NativeProtectedPathViolation { @@ -324,12 +326,25 @@ async function buildCleanupSnapshot( protectedRules.push(...collectConfiguredCleanupProtectionRules(cleanCtx)) + // Load aindex project config (aindex.config.ts) for empty-dir exclude globs + let emptyDirExcludeGlobs: string[] | undefined + if (cleanCtx.collectedOutputContext.aindexDir != null) { + const aindexConfig = await loadAindexProjectConfig(cleanCtx.collectedOutputContext.aindexDir) + if (aindexConfig.found) { + const exclude = aindexConfig.config.emptyDirCleanup?.exclude + if (exclude != null && exclude.length > 0) { + emptyDirExcludeGlobs = [...exclude] + } + } + } + return { workspaceDir: cleanCtx.collectedOutputContext.workspace.directory.path, ...cleanCtx.collectedOutputContext.aindexDir != null ? {aindexDir: cleanCtx.collectedOutputContext.aindexDir} : {}, projectRoots: collectProjectRoots(cleanCtx.collectedOutputContext), protectedRules, - pluginSnapshots + pluginSnapshots, + ...emptyDirExcludeGlobs != null && emptyDirExcludeGlobs.length > 0 ? 
{emptyDirExcludeGlobs} : {} } } @@ -439,7 +454,7 @@ export async function performCleanup( return { deletedFiles: result.deletedFiles, - deletedDirs: result.deletedDirs, + deletedDirs: result.deletedDirs + result.emptyDirsToDelete.length, errors: loggedErrors, violations: [], conflicts: [] diff --git a/cli/src/commands/DryRunCleanCommand.ts b/cli/src/commands/DryRunCleanCommand.ts index 67a9dfb0..72ce58c5 100644 --- a/cli/src/commands/DryRunCleanCommand.ts +++ b/cli/src/commands/DryRunCleanCommand.ts @@ -25,7 +25,8 @@ export class DryRunCleanCommand implements Command { globalFiles: outputs.globalFiles.length }) - const {filesToDelete, dirsToDelete, violations, excludedScanGlobs} = await collectDeletionTargets(outputPlugins, cleanCtx) + const {filesToDelete, dirsToDelete, emptyDirsToDelete, violations, excludedScanGlobs} = await collectDeletionTargets(outputPlugins, cleanCtx) + const totalDirsToDelete = [...dirsToDelete, ...emptyDirsToDelete] if (violations.length > 0) { logProtectedDeletionGuardError(logger, 'dry-run-cleanup', violations) @@ -38,12 +39,12 @@ export class DryRunCleanCommand implements Command { } this.logDryRunFiles(filesToDelete, logger) - this.logDryRunDirectories(dirsToDelete, logger) + this.logDryRunDirectories(totalDirsToDelete, logger) logger.info('clean complete', { dryRun: true, filesAffected: filesToDelete.length, - dirsAffected: dirsToDelete.length, + dirsAffected: totalDirsToDelete.length, violations: 0, excludedScanGlobs }) @@ -51,7 +52,7 @@ export class DryRunCleanCommand implements Command { return { success: true, filesAffected: filesToDelete.length, - dirsAffected: dirsToDelete.length, + dirsAffected: totalDirsToDelete.length, message: 'Dry-run complete, no files were deleted' } } diff --git a/cli/src/core/cleanup.rs b/cli/src/core/cleanup.rs index e7d5afb5..367b79c9 100644 --- a/cli/src/core/cleanup.rs +++ b/cli/src/core/cleanup.rs @@ -18,7 +18,7 @@ const DEFAULT_CLEANUP_SCAN_EXCLUDE_GLOBS: [&str; 6] = [ "**/.next/**", ]; -const 
EMPTY_DIRECTORY_SCAN_EXCLUDED_BASENAMES: [&str; 15] = [ +const EMPTY_DIRECTORY_SCAN_EXCLUDED_BASENAMES: [&str; 17] = [ ".git", "node_modules", "dist", @@ -34,6 +34,8 @@ const EMPTY_DIRECTORY_SCAN_EXCLUDED_BASENAMES: [&str; 15] = [ ".yarn", ".idea", ".vscode", + ".volumes", + "volumes", ]; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] @@ -119,6 +121,10 @@ pub struct CleanupSnapshot { pub protected_rules: Vec, #[serde(default)] pub plugin_snapshots: Vec, + /// Glob patterns from aindex.config.ts that should be excluded from + /// the empty-directory scanner (git-style ** patterns supported). + #[serde(default)] + pub empty_dir_exclude_globs: Vec, } #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] @@ -264,6 +270,18 @@ fn normalize_glob_pattern(pattern: &str) -> String { path_to_glob_string(&resolve_absolute_path(pattern)) } +fn normalize_relative_glob_pattern(pattern: &str) -> String { + let normalized = pattern.replace('\\', "/"); + let normalized = normalized.trim_start_matches("./"); + normalized.trim_start_matches('/').to_string() +} + +fn normalize_workspace_relative_path(path: &Path, workspace_dir: &Path) -> Option { + let relative = path.strip_prefix(workspace_dir).ok()?; + let relative = path_to_glob_string(relative); + Some(relative.trim_start_matches('/').to_string()) +} + fn normalize_for_comparison(raw_path: &str) -> String { let normalized = path_to_string(&resolve_absolute_path(raw_path)); if cfg!(windows) { @@ -430,63 +448,260 @@ fn detect_glob_scan_root(pattern: &str) -> PathBuf { resolve_absolute_path(scan_root) } -fn expand_glob(pattern: &str, ignore_globs: &[String]) -> Result, String> { - let normalized_pattern = normalize_glob_pattern(pattern); - let matcher = build_globset(std::slice::from_ref(&normalized_pattern))? 
- .ok_or_else(|| "failed to compile cleanup glob".to_string())?; - let ignore_matcher = build_globset(ignore_globs)?; +/// A group of glob patterns that share the same scan root and ignore globs. +/// All patterns in the group are evaluated in a single directory walk. +#[derive(Debug, Clone)] +struct GlobGroup { + scan_root: PathBuf, + pattern_indices: Vec, +} + +/// Metadata associated with each glob pattern for result fan-out. +#[derive(Debug, Clone)] +struct GlobTargetMetadata { + is_protected: bool, + target_index: usize, + exclude_basenames: Vec, +} + +type GlobMatchResults = Vec<(usize, Vec)>; +type BatchedGlobExecutionResult = (GlobMatchResults, GlobMatchResults); + +/// Batched glob planner that groups patterns by scan root and ignore set. +/// This reduces the number of directory walks from O(patterns) to O(unique scan roots). +#[derive(Debug)] +struct BatchedGlobPlanner { + ignore_matcher: Option, + groups: Vec, + normalized_patterns: Vec, + metadata: Vec, +} + +impl BatchedGlobPlanner { + fn new(ignore_globs: &[String]) -> Result { + Ok(Self { + ignore_matcher: build_globset(ignore_globs)?, + groups: Vec::new(), + normalized_patterns: Vec::new(), + metadata: Vec::new(), + }) + } - if !has_glob_magic(&normalized_pattern) { - let absolute_path = resolve_absolute_path(&normalized_pattern); - if !absolute_path.exists() { - return Ok(vec![]); + /// Add a glob pattern to the planner with its associated metadata. 
+ fn add_pattern( + &mut self, + pattern: &str, + is_protected: bool, + target_index: usize, + exclude_basenames: Vec, + ) { + let normalized = normalize_glob_pattern(pattern); + let pattern_index = self.normalized_patterns.len(); + self.normalized_patterns.push(normalized.clone()); + self.metadata.push(GlobTargetMetadata { + is_protected, + target_index, + exclude_basenames, + }); + + // Non-glob patterns (literal paths) don't need directory scanning + if !has_glob_magic(&normalized) { + return; } - let candidate = path_to_glob_string(&absolute_path); - if ignore_matcher - .as_ref() - .is_some_and(|compiled| compiled.is_match(&candidate)) + + let scan_root = detect_glob_scan_root(&normalized); + let scan_root_str = path_to_string(&scan_root); + + // Find or create a group for this scan root + if let Some(group) = self + .groups + .iter_mut() + .find(|g| path_to_string(&g.scan_root) == scan_root_str) { - return Ok(vec![]); - } - if matcher.is_match(&candidate) { - return Ok(vec![path_to_string(&absolute_path)]); + group.pattern_indices.push(pattern_index); + } else { + self.groups.push(GlobGroup { + scan_root, + pattern_indices: vec![pattern_index], + }); } - return Ok(vec![]); } - let scan_root = detect_glob_scan_root(&normalized_pattern); - if !scan_root.exists() { - return Ok(vec![]); - } + /// Execute the batched glob expansion and fan results back to targets. + /// Returns (protected_matches, delete_matches) where each is a vec of (target_index, matched_paths). 
+ fn execute(&self) -> Result { + let mut protected_results: HashMap> = HashMap::new(); + let mut delete_results: HashMap> = HashMap::new(); - let mut matches = Vec::new(); - let walker = WalkDir::new(&scan_root) - .follow_links(false) - .into_iter() - .filter_entry(|entry| { - let candidate = path_to_glob_string(entry.path()); - !ignore_matcher + // Process literal paths (non-glob patterns) directly + for (pattern_index, pattern) in self.normalized_patterns.iter().enumerate() { + if has_glob_magic(pattern) { + continue; + } + + let absolute_path = resolve_absolute_path(pattern); + if !absolute_path.exists() { + continue; + } + + let candidate = path_to_glob_string(&absolute_path); + if self + .ignore_matcher .as_ref() .is_some_and(|compiled| compiled.is_match(&candidate)) - }); + { + continue; + } - for entry in walker { - let Ok(entry) = entry else { - continue; - }; - let candidate = path_to_glob_string(entry.path()); - if matcher.is_match(&candidate) { - matches.push(path_to_string(&normalize_path(entry.path()))); + let metadata = &self.metadata[pattern_index]; + let normalized_entry = path_to_string(&absolute_path); + + // Check exclude_basenames for delete targets + if !metadata.is_protected + && !metadata.exclude_basenames.is_empty() + && let Some(basename) = Path::new(&normalized_entry).file_name() + { + let basename_str = basename.to_string_lossy(); + if metadata + .exclude_basenames + .iter() + .any(|excluded| excluded == basename_str.as_ref()) + { + continue; + } + } + + let target_map = if metadata.is_protected { + &mut protected_results + } else { + &mut delete_results + }; + target_map + .entry(metadata.target_index) + .or_default() + .push(normalized_entry); } + + // Process each group's patterns with a single directory walk + for group in &self.groups { + if !group.scan_root.exists() { + continue; + } + + let group_patterns: Vec = group + .pattern_indices + .iter() + .map(|&idx| self.normalized_patterns[idx].clone()) + .collect(); + + let matcher = 
build_globset(&group_patterns)? + .ok_or_else(|| "failed to compile cleanup glob batch".to_string())?; + + let walker = WalkDir::new(&group.scan_root) + .follow_links(false) + .into_iter() + .filter_entry(|entry| { + let candidate = path_to_glob_string(entry.path()); + !self + .ignore_matcher + .as_ref() + .is_some_and(|compiled| compiled.is_match(&candidate)) + }); + + for entry in walker { + let Ok(entry) = entry else { + continue; + }; + + let candidate = path_to_glob_string(entry.path()); + let matched_indices = matcher.matches(&candidate); + if matched_indices.is_empty() { + continue; + } + + let normalized_entry = path_to_string(&normalize_path(entry.path())); + + for matched_index in matched_indices { + let pattern_index = group.pattern_indices[matched_index]; + let metadata = &self.metadata[pattern_index]; + + // Check exclude_basenames for delete targets + if !metadata.is_protected + && !metadata.exclude_basenames.is_empty() + && let Some(basename) = Path::new(&normalized_entry).file_name() + { + let basename_str = basename.to_string_lossy(); + if metadata + .exclude_basenames + .iter() + .any(|excluded| excluded == basename_str.as_ref()) + { + continue; + } + } + + let target_map = if metadata.is_protected { + &mut protected_results + } else { + &mut delete_results + }; + target_map + .entry(metadata.target_index) + .or_default() + .push(normalized_entry.clone()); + } + } + } + + // Convert HashMaps to sorted Vecs and deduplicate + let mut protected_vec: Vec<(usize, Vec)> = protected_results + .into_iter() + .map(|(idx, mut paths)| { + paths.sort(); + paths.dedup(); + (idx, paths) + }) + .collect(); + protected_vec.sort_by_key(|(idx, _)| *idx); + + let mut delete_vec: Vec<(usize, Vec)> = delete_results + .into_iter() + .map(|(idx, mut paths)| { + paths.sort(); + paths.dedup(); + (idx, paths) + }) + .collect(); + delete_vec.sort_by_key(|(idx, _)| *idx); + + Ok((protected_vec, delete_vec)) + } +} + +/// Legacy function kept for backward compatibility with 
expand_protected_rules. +/// Prefer using BatchedGlobPlanner for new code. +fn expand_globs(patterns: &[String], ignore_globs: &[String]) -> Result>, String> { + if patterns.is_empty() { + return Ok(Vec::new()); } - matches.sort(); - matches.dedup(); - Ok(matches) + let mut planner = BatchedGlobPlanner::new(ignore_globs)?; + for (index, pattern) in patterns.iter().enumerate() { + planner.add_pattern(pattern, false, index, Vec::new()); + } + + let (_, delete_results) = planner.execute()?; + let mut matches_by_pattern = vec![Vec::new(); patterns.len()]; + for (target_index, paths) in delete_results { + matches_by_pattern[target_index] = paths; + } + + Ok(matches_by_pattern) } fn expand_protected_rules(rules: &[ProtectedRuleDto]) -> Result, String> { let mut expanded = Vec::new(); + let mut glob_rules = Vec::new(); for rule in rules { if !matches!(rule.matcher, Some(ProtectionRuleMatcherDto::Glob)) { @@ -499,8 +714,18 @@ fn expand_protected_rules(rules: &[ProtectedRuleDto]) -> Result>(), + &[], + )?; + for (rule, matched_paths) in glob_rules.iter().zip(matched_paths_by_rule) { + for matched_path in matched_paths { expanded.push(create_protected_rule( &matched_path, rule.protection_mode, @@ -669,7 +894,7 @@ fn collect_workspace_reserved_rules( rules.push(create_protected_rule( &format!("{workspace_dir}/aindex/{series_name}/**/*.mdx"), ProtectionModeDto::Direct, - &format!("reserved workspace aindex {series_name} mdx files"), + format!("reserved workspace aindex {series_name} mdx files"), "workspace-reserved", Some(ProtectionRuleMatcherDto::Glob), )); @@ -861,21 +1086,54 @@ fn should_skip_empty_directory_tree(workspace_dir: &str, current_dir: &str) -> b .is_some_and(|basename| EMPTY_DIRECTORY_SCAN_EXCLUDED_BASENAMES.contains(&basename)) } +/// Check if a directory path should be excluded from empty-directory scan +/// because it matches a user-supplied glob from aindex.config.ts. 
+fn matches_empty_dir_exclude_globs( + dir_path: &Path, + workspace_dir: &Path, + absolute_exclude_set: &Option, + relative_exclude_set: &Option, +) -> bool { + let absolute_match = absolute_exclude_set + .as_ref() + .is_some_and(|globs| globs.is_match(path_to_glob_string(dir_path))); + if absolute_match { + return true; + } + + relative_exclude_set.as_ref().is_some_and(|globs| { + normalize_workspace_relative_path(dir_path, workspace_dir) + .is_some_and(|relative_path| globs.is_match(relative_path)) + }) +} + fn collect_empty_workspace_directories( current_dir: &Path, - workspace_dir: &str, + workspace_dir: &Path, files_to_delete: &HashSet, dirs_to_delete: &HashSet, empty_dirs_to_delete: &mut BTreeSet, + empty_dir_absolute_exclude: &Option, + empty_dir_relative_exclude: &Option, ) -> bool { let current_dir = normalize_path(current_dir); let current_dir_string = path_to_string(¤t_dir); + let workspace_dir_string = path_to_string(workspace_dir); if dirs_to_delete.contains(¤t_dir_string) { return true; } - if should_skip_empty_directory_tree(workspace_dir, ¤t_dir_string) { + if should_skip_empty_directory_tree(&workspace_dir_string, ¤t_dir_string) { + return false; + } + + if matches_empty_dir_exclude_globs( + ¤t_dir, + workspace_dir, + empty_dir_absolute_exclude, + empty_dir_relative_exclude, + ) { return false; } @@ -904,7 +1162,17 @@ fn collect_empty_workspace_directories( }; if file_type.is_dir() { - if should_skip_empty_directory_tree(workspace_dir, &entry_string) { + if should_skip_empty_directory_tree(&workspace_dir_string, &entry_string) { + has_retained_entries = true; + continue; + } + + if matches_empty_dir_exclude_globs( + &entry_path, + workspace_dir, + empty_dir_absolute_exclude, + empty_dir_relative_exclude, + ) { has_retained_entries = true; continue; } @@ -915,6 +1183,8 @@ fn collect_empty_workspace_directories( files_to_delete, dirs_to_delete, empty_dirs_to_delete, + empty_dir_absolute_exclude, + empty_dir_relative_exclude, ) { 
empty_dirs_to_delete.insert(entry_string); continue; @@ -939,8 +1209,10 @@ fn plan_workspace_empty_directory_cleanup( files_to_delete: &[String], dirs_to_delete: &[String], guard: &ProtectedDeletionGuard, + empty_dir_absolute_exclude: &Option, + empty_dir_relative_exclude: &Option, ) -> (Vec, Vec) { - let workspace_dir = path_to_string(&resolve_absolute_path(workspace_dir)); + let workspace_dir = resolve_absolute_path(workspace_dir); let files_to_delete = files_to_delete .iter() .map(|path| path_to_string(&resolve_absolute_path(path))) @@ -952,11 +1224,13 @@ fn plan_workspace_empty_directory_cleanup( let mut discovered_empty_dirs = BTreeSet::new(); collect_empty_workspace_directories( - Path::new(&workspace_dir), + &workspace_dir, &workspace_dir, &files_to_delete, &dirs_to_delete, &mut discovered_empty_dirs, + empty_dir_absolute_exclude, + empty_dir_relative_exclude, ); let mut safe_empty_dirs = Vec::new(); @@ -1017,17 +1291,17 @@ fn detect_cleanup_protection_conflicts( conflicts } -fn should_exclude_cleanup_match(matched_path: &str, target: &CleanupTargetDto) -> bool { - if target.exclude_basenames.is_empty() { - return false; - } +#[derive(Debug, Clone)] +struct ProtectedGlobCleanupTarget { + path: String, + protection_mode: ProtectionModeDto, + reason: String, + source: String, +} - let basename = Path::new(matched_path) - .file_name() - .map(|value| value.to_string_lossy().into_owned()); - basename - .as_ref() - .is_some_and(|value| target.exclude_basenames.contains(value)) +#[derive(Debug, Clone)] +struct DeleteGlobCleanupTarget { + target: CleanupTargetDto, } fn default_protection_mode_for_target(target: &CleanupTargetDto) -> ProtectionModeDto { @@ -1049,6 +1323,8 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { .map(|value| (*value).to_string()), ); let mut output_path_owners = HashMap::>::new(); + let mut protected_glob_targets = Vec::::new(); + let mut delete_glob_targets = Vec::::new(); for plugin_snapshot in &snapshot.plugin_snapshots { for 
output in &plugin_snapshot.outputs { @@ -1070,22 +1346,16 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { for plugin_snapshot in &snapshot.plugin_snapshots { for target in &plugin_snapshot.cleanup.protect { if target.kind == CleanupTargetKindDto::Glob { - let protection_mode = default_protection_mode_for_target(target); - let reason = target - .label - .as_ref() - .map(|label| format!("plugin cleanup protect declaration ({label})")) - .unwrap_or_else(|| "plugin cleanup protect declaration".to_string()); - - for matched_path in expand_glob(&target.path, &ignore_globs)? { - protected_rules.push(create_protected_rule( - &matched_path, - protection_mode, - reason.clone(), - format!("plugin-cleanup-protect:{}", plugin_snapshot.plugin_name), - None, - )); - } + protected_glob_targets.push(ProtectedGlobCleanupTarget { + path: target.path.clone(), + protection_mode: default_protection_mode_for_target(target), + reason: target + .label + .as_ref() + .map(|label| format!("plugin cleanup protect declaration ({label})")) + .unwrap_or_else(|| "plugin cleanup protect declaration".to_string()), + source: format!("plugin-cleanup-protect:{}", plugin_snapshot.plugin_name), + }); continue; } @@ -1105,20 +1375,9 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { for target in &plugin_snapshot.cleanup.delete { if target.kind == CleanupTargetKindDto::Glob { - for matched_path in expand_glob(&target.path, &ignore_globs)? 
{ - if should_exclude_cleanup_match(&matched_path, target) { - continue; - } - - let Ok(metadata) = fs::symlink_metadata(&matched_path) else { - continue; - }; - if metadata.is_dir() { - delete_dirs.insert(path_to_string(&resolve_absolute_path(&matched_path))); - } else { - delete_files.insert(path_to_string(&resolve_absolute_path(&matched_path))); - } - } + delete_glob_targets.push(DeleteGlobCleanupTarget { + target: target.clone(), + }); continue; } @@ -1134,6 +1393,61 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { } } + // Batch all glob patterns (both protected and delete) into a single planner + // to minimize directory walks. This is the key performance optimization. + let mut planner = BatchedGlobPlanner::new(&ignore_globs)?; + + // Add protected glob targets + for (index, target) in protected_glob_targets.iter().enumerate() { + planner.add_pattern( + &target.path, + true, // is_protected + index, + Vec::new(), // protected globs don't use exclude_basenames + ); + } + + // Add delete glob targets + for (index, target) in delete_glob_targets.iter().enumerate() { + planner.add_pattern( + &target.target.path, + false, // is_delete + index, + target.target.exclude_basenames.clone(), + ); + } + + // Execute the batched glob expansion + let (protected_results, delete_results) = planner.execute()?; + + // Fan protected glob results back to their targets + for (target_index, matched_paths) in protected_results { + let target = &protected_glob_targets[target_index]; + for matched_path in matched_paths { + protected_rules.push(create_protected_rule( + &matched_path, + target.protection_mode, + target.reason.clone(), + target.source.clone(), + None, + )); + } + } + + // Fan delete glob results back to their targets + for (_target_index, matched_paths) in delete_results { + for matched_path in matched_paths { + let Ok(metadata) = fs::symlink_metadata(&matched_path) else { + continue; + }; + if metadata.is_dir() { + 
delete_dirs.insert(path_to_string(&resolve_absolute_path(&matched_path))); + } else { + delete_files.insert(path_to_string(&resolve_absolute_path(&matched_path))); + } + } + } + let guard = create_guard(&snapshot, &protected_rules)?; let conflicts = detect_cleanup_protection_conflicts(&output_path_owners, &guard); if !conflicts.is_empty() { @@ -1153,11 +1467,37 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { partition_deletion_targets(&delete_dirs.into_iter().collect::>(), &guard); let (files_to_delete, dirs_to_delete) = compact_deletion_targets(&file_partition.safe_paths, &dir_partition.safe_paths); + let empty_dir_absolute_exclude_set = build_globset( + &snapshot + .empty_dir_exclude_globs + .iter() + .map(|pattern| { + if expand_home_path(pattern).is_absolute() { + normalize_glob_pattern(pattern) + } else { + path_to_glob_string(&resolve_absolute_path(&format!( + "{}/{}", + snapshot.workspace_dir, pattern + ))) + } + }) + .collect::>(), + )?; + let empty_dir_relative_exclude_set = build_globset( + &snapshot + .empty_dir_exclude_globs + .iter() + .filter(|pattern| !expand_home_path(pattern).is_absolute()) + .map(|pattern| normalize_relative_glob_pattern(pattern)) + .collect::>(), + )?; let (empty_dirs_to_delete, empty_dir_violations) = plan_workspace_empty_directory_cleanup( &snapshot.workspace_dir, &files_to_delete, &dirs_to_delete, &guard, + &empty_dir_absolute_exclude_set, + &empty_dir_relative_exclude_set, ); let mut violations = file_partition.violations; @@ -1280,6 +1620,7 @@ mod tests { project_roots: vec![path_to_string(&workspace_dir.join("project-a"))], protected_rules: Vec::new(), plugin_snapshots: Vec::new(), + empty_dir_exclude_globs: Vec::new(), } } @@ -1596,4 +1937,373 @@ mod tests { assert!(!workspace_dir.join("scratch/empty").exists()); assert!(workspace_dir.join("scratch").exists()); } + + #[test] + fn preserves_empty_directories_excluded_by_workspace_relative_globs() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = 
temp_dir.path().join("workspace"); + let excluded_leaf_dir = workspace_dir.join("volumes/cache/leaf"); + let regular_leaf_dir = workspace_dir.join("scratch/empty/leaf"); + + fs::create_dir_all(&excluded_leaf_dir).unwrap(); + fs::create_dir_all(®ular_leaf_dir).unwrap(); + + let mut snapshot = + single_plugin_snapshot(&workspace_dir, vec![], CleanupDeclarationsDto::default()); + snapshot.empty_dir_exclude_globs = vec!["volumes/**".to_string()]; + + let plan = plan_cleanup(snapshot).unwrap(); + assert!(!plan + .empty_dirs_to_delete + .contains(&path_to_string(&workspace_dir.join("volumes/cache")))); + assert!(!plan + .empty_dirs_to_delete + .contains(&path_to_string(&excluded_leaf_dir))); + assert!(plan + .empty_dirs_to_delete + .contains(&path_to_string(&workspace_dir.join("scratch/empty")))); + assert!(plan + .empty_dirs_to_delete + .contains(&path_to_string(®ular_leaf_dir))); + } + + #[test] + fn skips_reserved_volume_trees_during_empty_directory_scan() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let volumes_leaf_dir = workspace_dir.join("volumes/cache/leaf"); + let hidden_volumes_leaf_dir = workspace_dir.join(".volumes/cache/leaf"); + let regular_leaf_dir = workspace_dir.join("scratch/empty/leaf"); + + fs::create_dir_all(&volumes_leaf_dir).unwrap(); + fs::create_dir_all(&hidden_volumes_leaf_dir).unwrap(); + fs::create_dir_all(®ular_leaf_dir).unwrap(); + + let snapshot = + single_plugin_snapshot(&workspace_dir, vec![], CleanupDeclarationsDto::default()); + + let plan = plan_cleanup(snapshot).unwrap(); + assert!(!plan + .empty_dirs_to_delete + .contains(&path_to_string(&workspace_dir.join("volumes/cache")))); + assert!(!plan + .empty_dirs_to_delete + .contains(&path_to_string(&volumes_leaf_dir))); + assert!(!plan + .empty_dirs_to_delete + .contains(&path_to_string(&workspace_dir.join(".volumes/cache")))); + assert!(!plan + .empty_dirs_to_delete + .contains(&path_to_string(&hidden_volumes_leaf_dir))); + 
assert!(plan + .empty_dirs_to_delete + .contains(&path_to_string(&workspace_dir.join("scratch/empty")))); + assert!(plan + .empty_dirs_to_delete + .contains(&path_to_string(®ular_leaf_dir))); + } + + #[test] + fn batched_glob_planner_handles_multiple_globs_sharing_root() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let cache_dir = workspace_dir.join("cache"); + let temp_dir_path = workspace_dir.join("temp"); + let logs_dir = workspace_dir.join("logs"); + + // Create test directories + fs::create_dir_all(cache_dir.join("sub1")).unwrap(); + fs::create_dir_all(cache_dir.join("sub2")).unwrap(); + fs::create_dir_all(temp_dir_path.join("tmp1")).unwrap(); + fs::create_dir_all(logs_dir.join("2024")).unwrap(); + + let snapshot = single_plugin_snapshot( + &workspace_dir, + vec![], + CleanupDeclarationsDto { + delete: vec![ + CleanupTargetDto { + path: path_to_string(&cache_dir.join("*")), + kind: CleanupTargetKindDto::Glob, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: Some("cache-cleanup".to_string()), + }, + CleanupTargetDto { + path: path_to_string(&temp_dir_path.join("*")), + kind: CleanupTargetKindDto::Glob, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: Some("temp-cleanup".to_string()), + }, + ], + ..CleanupDeclarationsDto::default() + }, + ); + + let plan = plan_cleanup(snapshot).unwrap(); + // Should match subdirectories under cache/ and temp/ but not logs/ + assert_eq!(plan.dirs_to_delete.len(), 3); + assert!(plan + .dirs_to_delete + .contains(&path_to_string(&cache_dir.join("sub1")))); + assert!(plan + .dirs_to_delete + .contains(&path_to_string(&cache_dir.join("sub2")))); + assert!(plan + .dirs_to_delete + .contains(&path_to_string(&temp_dir_path.join("tmp1")))); + assert!(!plan + .dirs_to_delete + .contains(&path_to_string(&logs_dir.join("2024")))); + } + + #[test] + fn batched_glob_planner_handles_mixed_protect_and_delete_globs() { + let 
temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let data_dir = workspace_dir.join("data"); + let keep_dir = data_dir.join("keep"); + let delete_dir = data_dir.join("delete"); + + fs::create_dir_all(&keep_dir).unwrap(); + fs::create_dir_all(&delete_dir).unwrap(); + + let snapshot = single_plugin_snapshot( + &workspace_dir, + vec![], + CleanupDeclarationsDto { + delete: vec![CleanupTargetDto { + path: path_to_string(&data_dir.join("*")), + kind: CleanupTargetKindDto::Glob, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: None, + }], + protect: vec![CleanupTargetDto { + // Protect the keep_dir itself using Recursive mode to protect its descendants too + path: path_to_string(&keep_dir), + kind: CleanupTargetKindDto::Directory, + exclude_basenames: Vec::new(), + protection_mode: Some(ProtectionModeDto::Recursive), + scope: None, + label: Some("protect-keep".to_string()), + }], + ..CleanupDeclarationsDto::default() + }, + ); + + let plan = plan_cleanup(snapshot).unwrap(); + // delete_dir should be deleted, keep_dir should NOT be deleted (protected by Directory target) + assert!(plan.dirs_to_delete.contains(&path_to_string(&delete_dir))); + assert!(!plan.dirs_to_delete.contains(&path_to_string(&keep_dir))); + // keep_dir is protected, so attempting to delete it is a violation + assert!(plan + .violations + .iter() + .any(|v| v.target_path == path_to_string(&keep_dir))); + } + + #[test] + fn batched_glob_planner_respects_exclude_basenames() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let build_dir = workspace_dir.join("build"); + let release_dir = build_dir.join("release"); + let debug_dir = build_dir.join("debug"); + let keep_dir = build_dir.join(".gitkeep"); + + fs::create_dir_all(&release_dir).unwrap(); + fs::create_dir_all(&debug_dir).unwrap(); + fs::create_dir_all(&keep_dir).unwrap(); + + let snapshot = single_plugin_snapshot( + 
&workspace_dir, + vec![], + CleanupDeclarationsDto { + delete: vec![CleanupTargetDto { + path: path_to_string(&build_dir.join("*")), + kind: CleanupTargetKindDto::Glob, + exclude_basenames: vec![".gitkeep".to_string()], + protection_mode: None, + scope: None, + label: Some("build-cleanup".to_string()), + }], + ..CleanupDeclarationsDto::default() + }, + ); + + let plan = plan_cleanup(snapshot).unwrap(); + // Should delete release and debug, but not .gitkeep + assert!(plan.dirs_to_delete.contains(&path_to_string(&release_dir))); + assert!(plan.dirs_to_delete.contains(&path_to_string(&debug_dir))); + assert!(!plan.dirs_to_delete.contains(&path_to_string(&keep_dir))); + } + + #[test] + fn batched_glob_planner_produces_stable_sorted_output() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let outputs_dir = workspace_dir.join("outputs"); + + // Create directories in non-alphabetical order + let dirs = vec!["zeta", "alpha", "beta", "gamma", "delta"]; + for dir in &dirs { + fs::create_dir_all(outputs_dir.join(dir)).unwrap(); + } + + let snapshot = single_plugin_snapshot( + &workspace_dir, + vec![], + CleanupDeclarationsDto { + delete: vec![CleanupTargetDto { + path: path_to_string(&outputs_dir.join("*")), + kind: CleanupTargetKindDto::Glob, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: None, + }], + ..CleanupDeclarationsDto::default() + }, + ); + + let plan = plan_cleanup(snapshot).unwrap(); + + // Verify output is sorted + let expected_order: Vec = dirs + .iter() + .map(|d| path_to_string(&outputs_dir.join(d))) + .collect::>() + .into_iter() + .collect::>() + .into_iter() + .collect(); + + assert_eq!(plan.dirs_to_delete, expected_order); + + // Run multiple times to ensure stability + for _ in 0..3 { + let plan2 = plan_cleanup(single_plugin_snapshot( + &workspace_dir, + vec![], + CleanupDeclarationsDto { + delete: vec![CleanupTargetDto { + path: path_to_string(&outputs_dir.join("*")), + 
kind: CleanupTargetKindDto::Glob, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: None, + }], + ..CleanupDeclarationsDto::default() + }, + )) + .unwrap(); + assert_eq!(plan.dirs_to_delete, plan2.dirs_to_delete); + } + } + + #[test] + fn batched_glob_planner_handles_file_vs_directory_classification() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let mixed_dir = workspace_dir.join("mixed"); + let file_path = mixed_dir.join("file.txt"); + let dir_path = mixed_dir.join("subdir"); + + fs::create_dir_all(&dir_path).unwrap(); + fs::write(&file_path, "content").unwrap(); + + let snapshot = single_plugin_snapshot( + &workspace_dir, + vec![], + CleanupDeclarationsDto { + delete: vec![CleanupTargetDto { + path: path_to_string(&mixed_dir.join("*")), + kind: CleanupTargetKindDto::Glob, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: None, + }], + ..CleanupDeclarationsDto::default() + }, + ); + + let plan = plan_cleanup(snapshot).unwrap(); + + // Files should be in files_to_delete, dirs in dirs_to_delete + assert!(plan.files_to_delete.contains(&path_to_string(&file_path))); + assert!(plan.dirs_to_delete.contains(&path_to_string(&dir_path))); + } + + #[test] + fn batched_glob_planner_handles_cross_plugin_glob_batching() { + let temp_dir = tempdir().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let project_a = workspace_dir.join("project-a/temp"); + let project_b = workspace_dir.join("project-b/temp"); + + fs::create_dir_all(project_a.join("old")).unwrap(); + fs::create_dir_all(project_b.join("cache")).unwrap(); + + // Multi-plugin snapshot to test cross-plugin batching + let snapshot = CleanupSnapshot { + workspace_dir: path_to_string(&workspace_dir), + aindex_dir: Some(path_to_string(&workspace_dir.join("aindex"))), + project_roots: vec![ + path_to_string(&workspace_dir.join("project-a")), + 
path_to_string(&workspace_dir.join("project-b")), + ], + protected_rules: Vec::new(), + plugin_snapshots: vec![ + PluginCleanupSnapshotDto { + plugin_name: "PluginA".to_string(), + outputs: vec![], + cleanup: CleanupDeclarationsDto { + delete: vec![CleanupTargetDto { + path: path_to_string(&project_a.join("*")), + kind: CleanupTargetKindDto::Glob, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: None, + }], + ..CleanupDeclarationsDto::default() + }, + }, + PluginCleanupSnapshotDto { + plugin_name: "PluginB".to_string(), + outputs: vec![], + cleanup: CleanupDeclarationsDto { + delete: vec![CleanupTargetDto { + path: path_to_string(&project_b.join("*")), + kind: CleanupTargetKindDto::Glob, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: None, + label: None, + }], + ..CleanupDeclarationsDto::default() + }, + }, + ], + empty_dir_exclude_globs: Vec::new(), + }; + + let plan = plan_cleanup(snapshot).unwrap(); + + // Both plugins' globs should be resolved + assert_eq!(plan.dirs_to_delete.len(), 2); + assert!(plan + .dirs_to_delete + .contains(&path_to_string(&project_a.join("old")))); + assert!(plan + .dirs_to_delete + .contains(&path_to_string(&project_b.join("cache")))); + } } diff --git a/cli/src/core/config/mod.rs b/cli/src/core/config/mod.rs index 8ed09410..6a1181b8 100644 --- a/cli/src/core/config/mod.rs +++ b/cli/src/core/config/mod.rs @@ -229,11 +229,11 @@ fn normalize_posix_like_path(raw_path: &str) -> String { } if component == ".." { - if let Some(last_component) = components.last() { - if *last_component != ".." { - components.pop(); - continue; - } + if let Some(last_component) = components.last() + && *last_component != ".." + { + components.pop(); + continue; } if !has_root { @@ -538,17 +538,16 @@ pub fn resolve_runtime_environment() -> RuntimeEnvironmentContext { /// Resolve `~` prefix to the user's home directory. 
pub fn resolve_tilde(p: &str) -> PathBuf { let runtime_environment = resolve_runtime_environment(); - if let Some(rest) = p.strip_prefix('~') { - if let Some(home) = runtime_environment + if let Some(rest) = p.strip_prefix('~') + && let Some(home) = runtime_environment .effective_home_dir .or(runtime_environment.native_home_dir) - { - let rest = rest - .strip_prefix('/') - .or_else(|| rest.strip_prefix('\\')) - .unwrap_or(rest); - return home.join(rest); - } + { + let rest = rest + .strip_prefix('/') + .or_else(|| rest.strip_prefix('\\')) + .unwrap_or(rest); + return home.join(rest); } PathBuf::from(p) } @@ -875,10 +874,10 @@ pub fn load_user_config(cwd: &Path) -> Result { /// Write a config file with pretty JSON formatting. pub fn write_config(path: &Path, config: &UserConfigFile, logger: &Logger) { - if let Some(parent) = path.parent() { - if !parent.exists() { - let _ = fs::create_dir_all(parent); - } + if let Some(parent) = path.parent() + && !parent.exists() + { + let _ = fs::create_dir_all(parent); } match serde_json::to_string_pretty(config) { @@ -1209,7 +1208,7 @@ mod tests { workspace_dir: Some("~/ws".into()), ..Default::default() }; - let result = merge_configs(&[config.clone()]); + let result = merge_configs(std::slice::from_ref(&config)); assert_eq!(result, config); } diff --git a/cli/src/core/desk_paths.rs b/cli/src/core/desk_paths.rs index 6b6d72f0..c308fc6e 100644 --- a/cli/src/core/desk_paths.rs +++ b/cli/src/core/desk_paths.rs @@ -66,10 +66,10 @@ fn get_windows_fixed_dir(ctx: &config::RuntimeEnvironmentContext) -> PathBuf { } fn get_linux_data_dir(ctx: &config::RuntimeEnvironmentContext) -> PathBuf { - if let Ok(xdg_data_home) = env::var("XDG_DATA_HOME") { - if !xdg_data_home.trim().is_empty() { - return PathBuf::from(resolve_user_path(&xdg_data_home, ctx)); - } + if let Ok(xdg_data_home) = env::var("XDG_DATA_HOME") + && !xdg_data_home.trim().is_empty() + { + return PathBuf::from(resolve_user_path(&xdg_data_home, ctx)); } 
get_home_dir(ctx).join(".local").join("share") } @@ -455,28 +455,30 @@ mod napi_binding { super::read_file_sync(path).map_err(|err| napi::Error::from_reason(err.to_string())) } - #[allow(non_snake_case)] #[napi(object)] pub struct NapiDeletionError { pub path: String, pub error: String, } - #[allow(non_snake_case)] #[napi(object)] pub struct NapiDeletionResult { pub deleted: u32, - pub deletedPaths: Vec, + #[napi(js_name = "deletedPaths")] + pub deleted_paths: Vec, pub errors: Vec, } - #[allow(non_snake_case)] #[napi(object)] pub struct NapiDeleteTargetsResult { - pub deletedFiles: Vec, - pub deletedDirs: Vec, - pub fileErrors: Vec, - pub dirErrors: Vec, + #[napi(js_name = "deletedFiles")] + pub deleted_files: Vec, + #[napi(js_name = "deletedDirs")] + pub deleted_dirs: Vec, + #[napi(js_name = "fileErrors")] + pub file_errors: Vec, + #[napi(js_name = "dirErrors")] + pub dir_errors: Vec, } fn to_napi_error(err: DeletionError) -> NapiDeletionError { @@ -491,7 +493,7 @@ mod napi_binding { let result = super::delete_files(&paths); NapiDeletionResult { deleted: result.deleted as u32, - deletedPaths: result.deleted_paths, + deleted_paths: result.deleted_paths, errors: result.errors.into_iter().map(to_napi_error).collect(), } } @@ -501,7 +503,7 @@ mod napi_binding { let result = super::delete_directories(&paths); NapiDeletionResult { deleted: result.deleted as u32, - deletedPaths: result.deleted_paths, + deleted_paths: result.deleted_paths, errors: result.errors.into_iter().map(to_napi_error).collect(), } } @@ -511,7 +513,7 @@ mod napi_binding { let result = super::delete_empty_directories(&paths); NapiDeletionResult { deleted: result.deleted as u32, - deletedPaths: result.deleted_paths, + deleted_paths: result.deleted_paths, errors: result.errors.into_iter().map(to_napi_error).collect(), } } @@ -528,10 +530,10 @@ mod napi_binding { let dirs = paths.dirs.unwrap_or_default(); let result = super::delete_targets(&files, &dirs); NapiDeleteTargetsResult { - deletedFiles: 
result.deleted_files, - deletedDirs: result.deleted_dirs, - fileErrors: result.file_errors.into_iter().map(to_napi_error).collect(), - dirErrors: result.dir_errors.into_iter().map(to_napi_error).collect(), + deleted_files: result.deleted_files, + deleted_dirs: result.deleted_dirs, + file_errors: result.file_errors.into_iter().map(to_napi_error).collect(), + dir_errors: result.dir_errors.into_iter().map(to_napi_error).collect(), } } } @@ -548,7 +550,7 @@ mod tests { let files_dir = dir.path().join("files"); let dirs_dir = dir.path().join("dirs"); fs::create_dir_all(&files_dir).unwrap(); - fs::create_dir_all(&dirs_dir.join("nested")).unwrap(); + fs::create_dir_all(dirs_dir.join("nested")).unwrap(); let file = files_dir.join("artifact.txt"); fs::write(&file, b"data").unwrap(); let leaf = dirs_dir.join("nested").join("inner.txt"); diff --git a/cli/src/plugins/AgentsOutputPlugin.ts b/cli/src/plugins/AgentsOutputPlugin.ts index 7aca7e77..1ae9ab13 100644 --- a/cli/src/plugins/AgentsOutputPlugin.ts +++ b/cli/src/plugins/AgentsOutputPlugin.ts @@ -22,22 +22,40 @@ export class AgentsOutputPlugin extends AbstractOutputPlugin { }) } - override async declareCleanupPaths(ctx: OutputCleanContext): Promise { + override async declareCleanupPaths( + ctx: OutputCleanContext + ): Promise { const declarations = await super.declareCleanupPaths(ctx) + const promptSourceProjects + = ctx.collectedOutputContext.workspace.projects.filter( + project => project.isPromptSourceProject === true + ) + const promptSourceExcludeGlobs = promptSourceProjects + .map(project => project.dirFromWorkspacePath) + .filter((dir): dir is NonNullable => dir != null) + .map(dir => this.resolvePath(dir.basePath, dir.path, '**')) return { ...declarations, delete: [ ...declarations.delete ?? [], ...this.buildProjectPromptCleanupTargets(ctx, PROJECT_MEMORY_FILE) + ], + excludeScanGlobs: [ + ...declarations.excludeScanGlobs ?? 
[], + ...promptSourceExcludeGlobs ] } } - override async declareOutputFiles(ctx: OutputWriteContext): Promise { + override async declareOutputFiles( + ctx: OutputWriteContext + ): Promise { const results: OutputFileDeclaration[] = [] const promptProjects = this.getProjectPromptOutputProjects(ctx) - const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['project'])) + const activePromptScopes = new Set( + this.selectPromptScopes(ctx, ['project']) + ) if (!activePromptScopes.has('project')) return results for (const [projectIndex, project] of promptProjects.entries()) { @@ -51,7 +69,10 @@ export class AgentsOutputPlugin extends AbstractOutputPlugin { } if (project.childMemoryPrompts != null) { - for (const [childIndex, child] of project.childMemoryPrompts.entries()) { + for (const [ + childIndex, + child + ] of project.childMemoryPrompts.entries()) { results.push({ path: this.resolveFullPath(child.dir), scope: 'project', @@ -69,23 +90,35 @@ export class AgentsOutputPlugin extends AbstractOutputPlugin { ctx: OutputWriteContext ): Promise { const projects = this.getProjectPromptOutputProjects(ctx) - const source = declaration.source as {type?: string, projectIndex?: number, childIndex?: number} + const source = declaration.source as { + type?: string + projectIndex?: number + childIndex?: number + } const projectIndex = source.projectIndex ?? 
-1 - if (projectIndex < 0 || projectIndex >= projects.length) throw new Error(`Invalid project index in declaration for ${this.name}`) + if (projectIndex < 0 || projectIndex >= projects.length) + { throw new Error(`Invalid project index in declaration for ${this.name}`) } const project = projects[projectIndex] - if (project == null) throw new Error(`Project not found for declaration in ${this.name}`) + if (project == null) + { throw new Error(`Project not found for declaration in ${this.name}`) } if (source.type === 'projectRootMemory') { - if (project.rootMemoryPrompt == null) throw new Error(`Root memory prompt missing for project index ${projectIndex}`) + if (project.rootMemoryPrompt == null) + { throw new Error( + `Root memory prompt missing for project index ${projectIndex}` + ) } return project.rootMemoryPrompt.content as string } if (source.type === 'projectChildMemory') { const childIndex = source.childIndex ?? -1 const child = project.childMemoryPrompts?.[childIndex] - if (child == null) throw new Error(`Child memory prompt missing for project ${projectIndex}, child ${childIndex}`) + if (child == null) + { throw new Error( + `Child memory prompt missing for project ${projectIndex}, child ${childIndex}` + ) } return child.content as string } diff --git a/cli/src/plugins/ClaudeCodeCLIOutputPlugin.ts b/cli/src/plugins/ClaudeCodeCLIOutputPlugin.ts index 7f012075..cb4fa6f9 100644 --- a/cli/src/plugins/ClaudeCodeCLIOutputPlugin.ts +++ b/cli/src/plugins/ClaudeCodeCLIOutputPlugin.ts @@ -1,4 +1,8 @@ -import type {OutputCleanContext, OutputCleanupDeclarations, RulePrompt} from './plugin-core' +import type { + OutputCleanContext, + OutputCleanupDeclarations, + RulePrompt +} from './plugin-core' import {doubleQuoted} from '@truenine/md-compiler/markdown' import {AbstractOutputPlugin} from './plugin-core' @@ -26,7 +30,8 @@ export class ClaudeCodeCLIOutputPlugin extends AbstractOutputPlugin { toolPreset: 'claudeCode', commands: { subDir: COMMANDS_SUBDIR, - 
transformFrontMatter: (_cmd, context) => context.sourceFrontMatter ?? {} + transformFrontMatter: (_cmd, context) => + context.sourceFrontMatter ?? {} }, subagents: { subDir: AGENTS_SUBDIR, @@ -39,23 +44,32 @@ export class ClaudeCodeCLIOutputPlugin extends AbstractOutputPlugin { subDir: SKILLS_SUBDIR }, rules: { - transformFrontMatter: (rule: RulePrompt) => ({paths: rule.globs.map(doubleQuoted)}) + transformFrontMatter: (rule: RulePrompt) => ({ + paths: rule.globs.map(doubleQuoted) + }) }, cleanup: { delete: { project: { - dirs: ['.claude/rules', '.claude/commands', '.claude/agents', '.claude/skills'] + dirs: [ + '.claude/rules', + '.claude/commands', + '.claude/agents', + '.claude/skills' + ] }, global: { files: ['.claude/CLAUDE.md'], - dirs: ['.claude/rules', '.claude/commands', '.claude/agents', '.claude/skills'] + dirs: [ + '.claude/rules', + '.claude/commands', + '.claude/agents', + '.claude/skills' + ] } } }, - wslMirrors: [ - '~/.claude/settings.json', - '~/.claude/config.json' - ], + wslMirrors: ['~/.claude/settings.json', '~/.claude/config.json'], capabilities: { prompt: { scopes: ['project', 'global'], @@ -81,14 +95,28 @@ export class ClaudeCodeCLIOutputPlugin extends AbstractOutputPlugin { }) } - override async declareCleanupPaths(ctx: OutputCleanContext): Promise { + override async declareCleanupPaths( + ctx: OutputCleanContext + ): Promise { const declarations = await super.declareCleanupPaths(ctx) + const promptSourceProjects + = ctx.collectedOutputContext.workspace.projects.filter( + project => project.isPromptSourceProject === true + ) + const promptSourceExcludeGlobs = promptSourceProjects + .map(project => project.dirFromWorkspacePath) + .filter((dir): dir is NonNullable => dir != null) + .map(dir => this.resolvePath(dir.basePath, dir.path, '**')) return { ...declarations, delete: [ ...declarations.delete ?? [], ...this.buildProjectPromptCleanupTargets(ctx, PROJECT_MEMORY_FILE) + ], + excludeScanGlobs: [ + ...declarations.excludeScanGlobs ?? 
[], + ...promptSourceExcludeGlobs ] } } diff --git a/cli/src/plugins/CursorOutputPlugin.test.ts b/cli/src/plugins/CursorOutputPlugin.test.ts index 3cc9569f..17fab4a4 100644 --- a/cli/src/plugins/CursorOutputPlugin.test.ts +++ b/cli/src/plugins/CursorOutputPlugin.test.ts @@ -165,7 +165,7 @@ describe('cursorOutputPlugin cleanup', () => { const normalizedCommandsDir = path.join(tempHomeDir, '.cursor', 'commands').replaceAll('\\', '/') const normalizedStaleDir = staleDir.replaceAll('\\', '/') const normalizedPreservedDir = preservedDir.replaceAll('\\', '/') - const skillCleanupTarget = result.delete?.find(target => target.kind === 'glob' && target.path.includes('skills-cursor')) + const skillCleanupTarget = result.delete?.find(target => target.kind === 'glob' && target.path.includes('skills')) const cleanupPlan = await collectDeletionTargets([plugin], createCleanContext()) const normalizedDeleteDirs = cleanupPlan.dirsToDelete.map(target => target.replaceAll('\\', '/')) @@ -211,7 +211,7 @@ describe('cursorOutputPlugin cleanup', () => { const paths = declarations.map(declaration => declaration.path) expect(paths).toContain(path.join(workspaceBase, '.cursor', 'commands', 'dev-build.md')) - expect(paths).toContain(path.join(workspaceBase, '.cursor', 'skills-cursor', 'ship-it', 'SKILL.md')) + expect(paths).toContain(path.join(workspaceBase, '.cursor', 'skills', 'ship-it', 'SKILL.md')) expect(paths).toContain(path.join(workspaceBase, '.cursor', 'mcp.json')) expect(paths).toContain(path.join(workspaceBase, '.cursor', 'rules', 'rule-ops-guard.md')) expect(declarations.every(declaration => declaration.scope === 'project')).toBe(true) @@ -260,9 +260,9 @@ describe('cursorOutputPlugin cleanup', () => { const paths = declarations.map(declaration => declaration.path) expect(paths).toContain(path.join(homeDir, '.cursor', 'skills-cursor', 'ship-it', 'SKILL.md')) - expect(paths).toContain(path.join(workspaceBase, '.cursor', 'skills-cursor', 'inspect-locally', 'mcp.json')) + 
expect(paths).toContain(path.join(workspaceBase, '.cursor', 'skills', 'inspect-locally', 'mcp.json')) expect(paths).toContain(path.join(workspaceBase, '.cursor', 'mcp.json')) - expect(paths).not.toContain(path.join(workspaceBase, '.cursor', 'skills-cursor', 'ship-it', 'SKILL.md')) + expect(paths).not.toContain(path.join(workspaceBase, '.cursor', 'skills', 'ship-it', 'SKILL.md')) expect(paths).not.toContain(path.join(homeDir, '.cursor', 'skills-cursor', 'inspect-locally', 'SKILL.md')) expect(paths).not.toContain(path.join(homeDir, '.cursor', 'mcp.json')) }) @@ -309,11 +309,11 @@ describe('cursorOutputPlugin cleanup', () => { const declarations = await plugin.declareOutputFiles(ctx) const paths = declarations.map(declaration => declaration.path) - expect(paths).toContain(path.join(workspaceBase, '.cursor', 'skills-cursor', 'ship-it', 'SKILL.md')) + expect(paths).toContain(path.join(workspaceBase, '.cursor', 'skills', 'ship-it', 'SKILL.md')) expect(paths).toContain(path.join(homeDir, '.cursor', 'skills-cursor', 'inspect-globally', 'mcp.json')) expect(paths).toContain(path.join(homeDir, '.cursor', 'mcp.json')) expect(paths).not.toContain(path.join(homeDir, '.cursor', 'skills-cursor', 'ship-it', 'SKILL.md')) - expect(paths).not.toContain(path.join(workspaceBase, '.cursor', 'skills-cursor', 'inspect-globally', 'SKILL.md')) + expect(paths).not.toContain(path.join(workspaceBase, '.cursor', 'skills', 'inspect-globally', 'SKILL.md')) expect(paths).not.toContain(path.join(workspaceBase, '.cursor', 'mcp.json')) }) diff --git a/cli/src/plugins/CursorOutputPlugin.ts b/cli/src/plugins/CursorOutputPlugin.ts index 60941873..4fc08530 100644 --- a/cli/src/plugins/CursorOutputPlugin.ts +++ b/cli/src/plugins/CursorOutputPlugin.ts @@ -30,16 +30,24 @@ const COMMANDS_SUBDIR = OutputSubdirectories.COMMANDS const RULES_SUBDIR = OutputSubdirectories.RULES const GLOBAL_RULE_FILE = OutputFileNames.CURSOR_GLOBAL_RULE const SKILLS_CURSOR_SUBDIR = OutputSubdirectories.CURSOR_SKILLS +const 
SKILLS_PROJECT_SUBDIR = 'skills' const SKILL_FILE_NAME = OutputFileNames.SKILL const PRESERVED_SKILLS = PreservedSkills.CURSOR type CursorOutputSource = | {readonly kind: 'command', readonly command: CommandPrompt} - | {readonly kind: 'mcpConfig', readonly mcpServers: Record>} + | { + readonly kind: 'mcpConfig' + readonly mcpServers: Record> + } | {readonly kind: 'skill', readonly skill: SkillPrompt} | {readonly kind: 'skillMcpConfig', readonly rawContent: string} | {readonly kind: 'skillChildDoc', readonly content: string} - | {readonly kind: 'skillResource', readonly content: string, readonly encoding: 'text' | 'base64'} + | { + readonly kind: 'skillResource' + readonly content: string + readonly encoding: 'text' | 'base64' + } | {readonly kind: 'globalRuleContent', readonly content: string} | {readonly kind: 'ruleMdc', readonly rule: RulePrompt} | {readonly kind: 'ignoreFile', readonly content: string} @@ -54,7 +62,8 @@ export class CursorOutputPlugin extends AbstractOutputPlugin { indexignore: IgnoreFiles.CURSOR, commands: { subDir: COMMANDS_SUBDIR, - transformFrontMatter: (_cmd, context) => context.sourceFrontMatter ?? {} + transformFrontMatter: (_cmd, context) => + context.sourceFrontMatter ?? 
{} }, skills: { subDir: SKILLS_CURSOR_SUBDIR @@ -69,7 +78,7 @@ export class CursorOutputPlugin extends AbstractOutputPlugin { project: { files: ['.cursor/mcp.json'], dirs: ['.cursor/commands', '.cursor/rules'], - globs: ['.cursor/skills-cursor/*'] + globs: ['.cursor/skills/*', '.cursor/skills-cursor/*'] }, global: { files: ['.cursor/mcp.json'], @@ -78,13 +87,17 @@ export class CursorOutputPlugin extends AbstractOutputPlugin { } }, protect: { - project: { - dirs: Array.from(PRESERVED_SKILLS, skillName => `.cursor/skills-cursor/${skillName}`) - }, global: { - dirs: Array.from(PRESERVED_SKILLS, skillName => `.cursor/skills-cursor/${skillName}`) + dirs: Array.from( + PRESERVED_SKILLS, + skillName => `.cursor/skills-cursor/${skillName}` + ) } - } + }, + excludeScanGlobs: Array.from( + PRESERVED_SKILLS, + skillName => `.cursor/skills-cursor/${skillName}/**` + ) }, capabilities: { prompt: { @@ -111,7 +124,9 @@ export class CursorOutputPlugin extends AbstractOutputPlugin { }) } - override async declareCleanupPaths(ctx: OutputCleanContext): Promise { + override async declareCleanupPaths( + ctx: OutputCleanContext + ): Promise { const declarations = await super.declareCleanupPaths(ctx) return { ...declarations, @@ -119,7 +134,8 @@ export class CursorOutputPlugin extends AbstractOutputPlugin { if (target.kind !== 'glob') return target const normalizedPath = target.path.replaceAll('\\', '/') - if (!normalizedPath.endsWith(`/.cursor/${SKILLS_CURSOR_SUBDIR}/*`)) return target + if (!normalizedPath.endsWith(`/.cursor/${SKILLS_CURSOR_SUBDIR}/*`)) + { return target } return { ...target, @@ -129,46 +145,67 @@ export class CursorOutputPlugin extends AbstractOutputPlugin { } } - override async declareOutputFiles(ctx: OutputWriteContext): Promise { + override async declareOutputFiles( + ctx: OutputWriteContext + ): Promise { const declarations: OutputFileDeclaration[] = [] - const {globalMemory, commands, skills, rules, aiAgentIgnoreConfigFiles} = ctx.collectedOutputContext + const 
{globalMemory, commands, skills, rules, aiAgentIgnoreConfigFiles} + = ctx.collectedOutputContext const globalDir = this.getGlobalConfigDir() - const promptSourceProjectConfig = this.resolvePromptSourceProjectConfig(ctx) + const promptSourceProjectConfig + = this.resolvePromptSourceProjectConfig(ctx) const concreteProjects = this.getConcreteProjects(ctx) const promptProjects = this.getProjectPromptOutputProjects(ctx) - const transformOptions = this.getTransformOptionsFromContext(ctx, {includeSeriesPrefix: true}) - const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['global'])) - const activeRuleScopes = new Set(rules != null ? this.selectRuleScopes(ctx, rules) : []) - const selectedSkills = skills != null - ? this.selectSingleScopeItems(skills, this.skillsConfig.sourceScopes, skill => this.resolveSkillSourceScope(skill), this.getTopicScopeOverride(ctx, 'skills')) - : {items: [] as readonly SkillPrompt[]} - const selectedMcpSkills = skills != null - ? this.selectSingleScopeItems( - skills, - this.skillsConfig.sourceScopes, - skill => this.resolveSkillSourceScope(skill), - this.getTopicScopeOverride(ctx, 'mcp') ?? this.getTopicScopeOverride(ctx, 'skills') - ) - : {items: [] as readonly SkillPrompt[]} - const selectedCommands = commands != null - ? this.selectSingleScopeItems( - commands, - this.commandsConfig.sourceScopes, - command => this.resolveCommandSourceScope(command), - this.getTopicScopeOverride(ctx, 'commands') - ) - : {items: [] as readonly CommandPrompt[]} + const transformOptions = this.getTransformOptionsFromContext(ctx, { + includeSeriesPrefix: true + }) + const activePromptScopes = new Set( + this.selectPromptScopes(ctx, ['global']) + ) + const activeRuleScopes = new Set( + rules != null ? this.selectRuleScopes(ctx, rules) : [] + ) + const selectedSkills + = skills != null + ? 
this.selectSingleScopeItems( + skills, + this.skillsConfig.sourceScopes, + skill => this.resolveSkillSourceScope(skill), + this.getTopicScopeOverride(ctx, 'skills') + ) + : {items: [] as readonly SkillPrompt[]} + const selectedMcpSkills + = skills != null + ? this.selectSingleScopeItems( + skills, + this.skillsConfig.sourceScopes, + skill => this.resolveSkillSourceScope(skill), + this.getTopicScopeOverride(ctx, 'mcp') + ?? this.getTopicScopeOverride(ctx, 'skills') + ) + : {items: [] as readonly SkillPrompt[]} + const selectedCommands + = commands != null + ? this.selectSingleScopeItems( + commands, + this.commandsConfig.sourceScopes, + command => this.resolveCommandSourceScope(command), + this.getTopicScopeOverride(ctx, 'commands') + ) + : {items: [] as readonly CommandPrompt[]} const pushSkillDeclarations = ( baseDir: string, scope: 'project' | 'global', filteredSkills: readonly SkillPrompt[] ): void => { + const skillsSubDir + = scope === 'global' ? SKILLS_CURSOR_SUBDIR : SKILLS_PROJECT_SUBDIR for (const skill of filteredSkills) { const skillName = this.getSkillName(skill) if (this.isPreservedSkill(skillName)) continue - const skillDir = path.join(baseDir, SKILLS_CURSOR_SUBDIR, skillName) + const skillDir = path.join(baseDir, skillsSubDir, skillName) declarations.push({ path: path.join(skillDir, SKILL_FILE_NAME), scope, @@ -178,7 +215,10 @@ export class CursorOutputPlugin extends AbstractOutputPlugin { if (skill.childDocs != null) { for (const childDoc of skill.childDocs) { declarations.push({ - path: path.join(skillDir, childDoc.relativePath.replace(/\.mdx$/, '.md')), + path: path.join( + skillDir, + childDoc.relativePath.replace(/\.mdx$/, '.md') + ), scope, source: { kind: 'skillChildDoc', @@ -209,10 +249,16 @@ export class CursorOutputPlugin extends AbstractOutputPlugin { scope: 'project' | 'global', filteredMcpSkills: readonly SkillPrompt[] ): void => { + const skillsSubDir + = scope === 'global' ? 
SKILLS_CURSOR_SUBDIR : SKILLS_PROJECT_SUBDIR for (const skill of filteredMcpSkills) { if (skill.mcpConfig == null) continue - const skillDir = path.join(baseDir, SKILLS_CURSOR_SUBDIR, this.getSkillName(skill)) + const skillDir = path.join( + baseDir, + skillsSubDir, + this.getSkillName(skill) + ) declarations.push({ path: path.join(skillDir, MCP_CONFIG_FILE), scope, @@ -239,37 +285,62 @@ export class CursorOutputPlugin extends AbstractOutputPlugin { scope, source: { kind: 'mcpConfig', - mcpServers: transformMcpServerMap(servers, transformMcpConfigForCursor) + mcpServers: transformMcpServerMap( + servers, + transformMcpConfigForCursor + ) } satisfies CursorOutputSource }) } - if (selectedSkills.selectedScope === 'project' || selectedMcpSkills.selectedScope === 'project') { + if ( + selectedSkills.selectedScope === 'project' + || selectedMcpSkills.selectedScope === 'project' + ) { for (const project of this.getProjectOutputProjects(ctx)) { const baseDir = this.resolveProjectConfigDir(ctx, project) if (baseDir == null) continue if (selectedSkills.selectedScope === 'project') { - const filteredSkills = filterByProjectConfig(selectedSkills.items, project.projectConfig, 'skills') + const filteredSkills = filterByProjectConfig( + selectedSkills.items, + project.projectConfig, + 'skills' + ) pushSkillDeclarations(baseDir, 'project', filteredSkills) } if (selectedMcpSkills.selectedScope === 'project') { - const filteredMcpSkills = filterByProjectConfig(selectedMcpSkills.items, project.projectConfig, 'skills') + const filteredMcpSkills = filterByProjectConfig( + selectedMcpSkills.items, + project.projectConfig, + 'skills' + ) pushSkillMcpDeclarations(baseDir, 'project', filteredMcpSkills) pushMcpDeclaration(baseDir, 'project', filteredMcpSkills) } } } - if (selectedSkills.selectedScope === 'global' || selectedMcpSkills.selectedScope === 'global') { + if ( + selectedSkills.selectedScope === 'global' + || selectedMcpSkills.selectedScope === 'global' + ) { if 
(selectedSkills.selectedScope === 'global') { - const filteredSkills = filterByProjectConfig(selectedSkills.items, promptSourceProjectConfig, 'skills') + const filteredSkills = filterByProjectConfig( + selectedSkills.items, + promptSourceProjectConfig, + 'skills' + ) pushSkillDeclarations(globalDir, 'global', filteredSkills) } if (selectedMcpSkills.selectedScope === 'global') { - const filteredMcpSkills = filterByProjectConfig(selectedMcpSkills.items, promptSourceProjectConfig, 'skills') + const filteredMcpSkills = filterByProjectConfig( + selectedMcpSkills.items, + promptSourceProjectConfig, + 'skills' + ) pushSkillMcpDeclarations(globalDir, 'global', filteredMcpSkills) pushMcpDeclaration(globalDir, 'global', filteredMcpSkills) } @@ -280,10 +351,18 @@ export class CursorOutputPlugin extends AbstractOutputPlugin { const baseDir = this.resolveProjectConfigDir(ctx, project) if (baseDir == null) continue - const filteredCommands = filterByProjectConfig(selectedCommands.items, project.projectConfig, 'commands') + const filteredCommands = filterByProjectConfig( + selectedCommands.items, + project.projectConfig, + 'commands' + ) for (const command of filteredCommands) { declarations.push({ - path: path.join(baseDir, COMMANDS_SUBDIR, this.transformCommandName(command, transformOptions)), + path: path.join( + baseDir, + COMMANDS_SUBDIR, + this.transformCommandName(command, transformOptions) + ), scope: 'project', source: {kind: 'command', command} satisfies CursorOutputSource }) @@ -292,10 +371,18 @@ export class CursorOutputPlugin extends AbstractOutputPlugin { } if (selectedCommands.selectedScope === 'global') { - const filteredCommands = filterByProjectConfig(selectedCommands.items, promptSourceProjectConfig, 'commands') + const filteredCommands = filterByProjectConfig( + selectedCommands.items, + promptSourceProjectConfig, + 'commands' + ) for (const command of filteredCommands) { declarations.push({ - path: path.join(globalDir, COMMANDS_SUBDIR, 
this.transformCommandName(command, transformOptions)), + path: path.join( + globalDir, + COMMANDS_SUBDIR, + this.transformCommandName(command, transformOptions) + ), scope: 'global', source: {kind: 'command', command} satisfies CursorOutputSource }) @@ -303,11 +390,18 @@ export class CursorOutputPlugin extends AbstractOutputPlugin { } if (rules != null && rules.length > 0) { - const globalRules = rules.filter(rule => this.normalizeSourceScope(this.normalizeRuleScope(rule)) === 'global') + const globalRules = rules.filter( + rule => + this.normalizeSourceScope(this.normalizeRuleScope(rule)) === 'global' + ) if (activeRuleScopes.has('global')) { for (const rule of globalRules) { declarations.push({ - path: path.join(globalDir, RULES_SUBDIR, this.buildRuleFileName(rule)), + path: path.join( + globalDir, + RULES_SUBDIR, + this.buildRuleFileName(rule) + ), scope: 'global', source: {kind: 'ruleMdc', rule} satisfies CursorOutputSource }) @@ -319,12 +413,24 @@ export class CursorOutputPlugin extends AbstractOutputPlugin { const projectBaseDir = this.resolveProjectConfigDir(ctx, project) if (projectBaseDir == null) continue const projectRules = applySubSeriesGlobPrefix( - filterByProjectConfig(rules.filter(rule => this.normalizeSourceScope(this.normalizeRuleScope(rule)) === 'project'), project.projectConfig, 'rules'), + filterByProjectConfig( + rules.filter( + rule => + this.normalizeSourceScope(this.normalizeRuleScope(rule)) + === 'project' + ), + project.projectConfig, + 'rules' + ), project.projectConfig ) for (const rule of projectRules) { declarations.push({ - path: path.join(projectBaseDir, RULES_SUBDIR, this.buildRuleFileName(rule)), + path: path.join( + projectBaseDir, + RULES_SUBDIR, + this.buildRuleFileName(rule) + ), scope: 'project', source: {kind: 'ruleMdc', rule} satisfies CursorOutputSource }) @@ -334,7 +440,10 @@ export class CursorOutputPlugin extends AbstractOutputPlugin { } if (globalMemory != null && activePromptScopes.has('global')) { - const 
globalRuleContent = this.buildGlobalRuleContent(globalMemory.content as string, ctx) + const globalRuleContent = this.buildGlobalRuleContent( + globalMemory.content as string, + ctx + ) for (const project of promptProjects) { const projectBaseDir = this.resolveProjectConfigDir(ctx, project) if (projectBaseDir == null) continue @@ -350,15 +459,23 @@ export class CursorOutputPlugin extends AbstractOutputPlugin { } const ignoreOutputPath = this.getIgnoreOutputPath() - const ignoreFile = this.indexignore == null - ? void 0 - : aiAgentIgnoreConfigFiles?.find(file => file.fileName === this.indexignore) + const ignoreFile + = this.indexignore == null + ? void 0 + : aiAgentIgnoreConfigFiles?.find( + file => file.fileName === this.indexignore + ) if (ignoreOutputPath != null && ignoreFile != null) { for (const project of concreteProjects) { const projectDir = project.dirFromWorkspacePath - if (projectDir == null || project.isPromptSourceProject === true) continue + if (projectDir == null || project.isPromptSourceProject === true) + { continue } declarations.push({ - path: path.join(projectDir.basePath, projectDir.path, ignoreOutputPath), + path: path.join( + projectDir.basePath, + projectDir.path, + ignoreOutputPath + ), scope: 'project', source: { kind: 'ignoreFile', @@ -377,30 +494,58 @@ export class CursorOutputPlugin extends AbstractOutputPlugin { ): Promise { const source = declaration.source as CursorOutputSource switch (source.kind) { - case 'command': return this.buildCommandContent(source.command, ctx) - case 'mcpConfig': return JSON.stringify({mcpServers: source.mcpServers}, null, 2) + case 'command': + return this.buildCommandContent(source.command, ctx) + case 'mcpConfig': + return JSON.stringify({mcpServers: source.mcpServers}, null, 2) case 'skill': { const frontMatterData = this.buildSkillFrontMatter(source.skill) - return this.buildMarkdownContent(source.skill.content as string, frontMatterData, ctx) + return this.buildMarkdownContent( + source.skill.content 
as string, + frontMatterData, + ctx + ) } - case 'skillMcpConfig': return source.rawContent + case 'skillMcpConfig': + return source.rawContent case 'skillChildDoc': case 'globalRuleContent': - case 'ignoreFile': return source.content - case 'skillResource': return source.encoding === 'base64' ? Buffer.from(source.content, 'base64') : source.content - case 'ruleMdc': return this.buildRuleMdcContent(source.rule, ctx) - default: throw new Error(`Unsupported declaration source for ${this.name}`) + case 'ignoreFile': + return source.content + case 'skillResource': + return source.encoding === 'base64' + ? Buffer.from(source.content, 'base64') + : source.content + case 'ruleMdc': + return this.buildRuleMdcContent(source.rule, ctx) + default: + throw new Error(`Unsupported declaration source for ${this.name}`) } } - private buildGlobalRuleContent(content: string, ctx: OutputWriteContext): string { - return this.buildMarkdownContent(content, {description: 'Global prompt (synced)', alwaysApply: true}, ctx) + private buildGlobalRuleContent( + content: string, + ctx: OutputWriteContext + ): string { + return this.buildMarkdownContent( + content, + {description: 'Global prompt (synced)', alwaysApply: true}, + ctx + ) } - private isPreservedSkill(name: string): boolean { return PRESERVED_SKILLS.has(name) } + private isPreservedSkill(name: string): boolean { + return PRESERVED_SKILLS.has(name) + } - protected buildRuleMdcContent(rule: RulePrompt, ctx?: OutputWriteContext): string { - const fmData: Record = {alwaysApply: false, globs: rule.globs.length > 0 ? rule.globs.join(', ') : ''} + protected buildRuleMdcContent( + rule: RulePrompt, + ctx?: OutputWriteContext + ): string { + const fmData: Record = { + alwaysApply: false, + globs: rule.globs.length > 0 ? 
rule.globs.join(', ') : '' + } const raw = this.buildMarkdownContent(rule.content, fmData, ctx) const lines = raw.split('\n') const transformedLines = lines.map(line => { diff --git a/cli/src/plugins/GeminiCLIOutputPlugin.ts b/cli/src/plugins/GeminiCLIOutputPlugin.ts index cdbcde1e..9c4b9bb2 100644 --- a/cli/src/plugins/GeminiCLIOutputPlugin.ts +++ b/cli/src/plugins/GeminiCLIOutputPlugin.ts @@ -1,4 +1,7 @@ -import type {OutputCleanContext, OutputCleanupDeclarations} from './plugin-core' +import type { + OutputCleanContext, + OutputCleanupDeclarations +} from './plugin-core' import {AbstractOutputPlugin} from './plugin-core' const PROJECT_MEMORY_FILE = 'GEMINI.md' @@ -26,14 +29,28 @@ export class GeminiCLIOutputPlugin extends AbstractOutputPlugin { }) } - override async declareCleanupPaths(ctx: OutputCleanContext): Promise { + override async declareCleanupPaths( + ctx: OutputCleanContext + ): Promise { const declarations = await super.declareCleanupPaths(ctx) + const promptSourceProjects + = ctx.collectedOutputContext.workspace.projects.filter( + project => project.isPromptSourceProject === true + ) + const promptSourceExcludeGlobs = promptSourceProjects + .map(project => project.dirFromWorkspacePath) + .filter((dir): dir is NonNullable => dir != null) + .map(dir => this.resolvePath(dir.basePath, dir.path, '**')) return { ...declarations, delete: [ ...declarations.delete ?? [], ...this.buildProjectPromptCleanupTargets(ctx, PROJECT_MEMORY_FILE) + ], + excludeScanGlobs: [ + ...declarations.excludeScanGlobs ?? 
[], + ...promptSourceExcludeGlobs ] } } diff --git a/cli/src/plugins/GenericSkillsOutputPlugin.test.ts b/cli/src/plugins/GenericSkillsOutputPlugin.test.ts index 1100889f..092e54f5 100644 --- a/cli/src/plugins/GenericSkillsOutputPlugin.test.ts +++ b/cli/src/plugins/GenericSkillsOutputPlugin.test.ts @@ -1,4 +1,4 @@ -import type {OutputWriteContext, SkillPrompt} from './plugin-core' +import type {OutputCleanContext, OutputWriteContext, SkillPrompt} from './plugin-core' import * as fs from 'node:fs' import * as path from 'node:path' import {describe, expect, it} from 'vitest' @@ -76,6 +76,31 @@ function createContext( } as OutputWriteContext } +function createCleanContext(): OutputCleanContext { + return { + logger: createLogger('GenericSkillsOutputPlugin', 'error'), + fs, + path, + glob: {} as never, + dryRun: true, + runtimeTargets: { + jetbrainsCodexDirs: [] + }, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Relative, + path: '.', + basePath: '.', + getDirectoryName: () => '.', + getAbsolutePath: () => path.resolve('.') + }, + projects: [] + } + } + } as OutputCleanContext +} + describe('genericSkillsOutputPlugin synthetic workspace project output', () => { it('writes project-scoped skills into workspace root .agents/skills via the synthetic workspace project', async () => { const workspaceBase = path.resolve('tmp/generic-skills-workspace') @@ -148,3 +173,20 @@ describe('genericSkillsOutputPlugin synthetic workspace project output', () => { ) }) }) + +describe('genericSkillsOutputPlugin cleanup', () => { + it('declares cleanup for the full legacy global ~/.skills directory', async () => { + const homeDir = path.resolve('tmp/generic-skills-home') + const plugin = new TestGenericSkillsOutputPlugin(homeDir) + + const cleanup = await plugin.declareCleanupPaths(createCleanContext()) + const deletePaths = cleanup.delete?.map(target => target.path.replaceAll('\\', '/')) ?? 
[] + + expect(deletePaths).toContain( + path.join(homeDir, '.agents', 'skills').replaceAll('\\', '/') + ) + expect(deletePaths).toContain( + path.join(homeDir, '.skills').replaceAll('\\', '/') + ) + }) +}) diff --git a/cli/src/plugins/GenericSkillsOutputPlugin.ts b/cli/src/plugins/GenericSkillsOutputPlugin.ts index bc2c027e..c7698ab3 100644 --- a/cli/src/plugins/GenericSkillsOutputPlugin.ts +++ b/cli/src/plugins/GenericSkillsOutputPlugin.ts @@ -8,20 +8,28 @@ import {Buffer} from 'node:buffer' import {AbstractOutputPlugin, filterByProjectConfig} from './plugin-core' const PROJECT_SKILLS_DIR = '.agents/skills' +const LEGACY_SKILLS_DIR = '.skills' const SKILL_FILE_NAME = 'SKILL.md' const MCP_CONFIG_FILE = 'mcp.json' type GenericSkillOutputSource - = {readonly kind: 'skillMain', readonly skill: SkillPrompt} + = | {readonly kind: 'skillMain', readonly skill: SkillPrompt} | {readonly kind: 'skillMcp', readonly rawContent: string} | {readonly kind: 'skillChildDoc', readonly content: string} - | {readonly kind: 'skillResource', readonly content: string, readonly encoding: 'text' | 'base64'} + | { + readonly kind: 'skillResource' + readonly content: string + readonly encoding: 'text' | 'base64' + } /** * Output plugin that writes skills directly to each project's .agents/skills/ directory. * * Structure: * - Project: /.agents/skills//SKILL.md, mcp.json, child docs, resources + * + * @deprecated Legacy compact skills output. Cleanup must remove the entire + * global `~/.skills/` directory in addition to the current skill targets. 
*/ export class GenericSkillsOutputPlugin extends AbstractOutputPlugin { constructor() { @@ -32,10 +40,10 @@ export class GenericSkillsOutputPlugin extends AbstractOutputPlugin { cleanup: { delete: { project: { - dirs: [PROJECT_SKILLS_DIR] + dirs: [PROJECT_SKILLS_DIR, LEGACY_SKILLS_DIR] }, global: { - dirs: [PROJECT_SKILLS_DIR] + dirs: [PROJECT_SKILLS_DIR, LEGACY_SKILLS_DIR] } } }, @@ -52,7 +60,9 @@ export class GenericSkillsOutputPlugin extends AbstractOutputPlugin { }) } - override async declareOutputFiles(ctx: OutputWriteContext): Promise { + override async declareOutputFiles( + ctx: OutputWriteContext + ): Promise { const declarations: OutputFileDeclaration[] = [] const {skills} = ctx.collectedOutputContext @@ -68,7 +78,8 @@ export class GenericSkillsOutputPlugin extends AbstractOutputPlugin { skills, this.skillsConfig.sourceScopes, skill => this.resolveSkillSourceScope(skill), - this.getTopicScopeOverride(ctx, 'mcp') ?? this.getTopicScopeOverride(ctx, 'skills') + this.getTopicScopeOverride(ctx, 'mcp') + ?? 
this.getTopicScopeOverride(ctx, 'skills') ) const pushSkillDeclarations = ( @@ -83,13 +94,19 @@ export class GenericSkillsOutputPlugin extends AbstractOutputPlugin { declarations.push({ path: this.joinPath(skillDir, SKILL_FILE_NAME), scope, - source: {kind: 'skillMain', skill} satisfies GenericSkillOutputSource + source: { + kind: 'skillMain', + skill + } satisfies GenericSkillOutputSource }) if (skill.childDocs != null) { for (const childDoc of skill.childDocs) { declarations.push({ - path: this.joinPath(skillDir, childDoc.relativePath.replace(/\.mdx$/, '.md')), + path: this.joinPath( + skillDir, + childDoc.relativePath.replace(/\.mdx$/, '.md') + ), scope, source: { kind: 'skillChildDoc', @@ -124,7 +141,11 @@ export class GenericSkillsOutputPlugin extends AbstractOutputPlugin { if (skill.mcpConfig == null) continue declarations.push({ - path: this.joinPath(baseSkillsDir, this.getSkillName(skill), MCP_CONFIG_FILE), + path: this.joinPath( + baseSkillsDir, + this.getSkillName(skill), + MCP_CONFIG_FILE + ), scope, source: { kind: 'skillMcp', @@ -134,33 +155,63 @@ export class GenericSkillsOutputPlugin extends AbstractOutputPlugin { } } - if (selectedSkills.selectedScope === 'project' || selectedMcpSkills.selectedScope === 'project') { + if ( + selectedSkills.selectedScope === 'project' + || selectedMcpSkills.selectedScope === 'project' + ) { for (const project of this.getProjectOutputProjects(ctx)) { const projectRootDir = this.resolveProjectRootDir(ctx, project) if (projectRootDir == null) continue - const filteredSkills = filterByProjectConfig(selectedSkills.items, project.projectConfig, 'skills') - const filteredMcpSkills = filterByProjectConfig(selectedMcpSkills.items, project.projectConfig, 'skills') + const filteredSkills = filterByProjectConfig( + selectedSkills.items, + project.projectConfig, + 'skills' + ) + const filteredMcpSkills = filterByProjectConfig( + selectedMcpSkills.items, + project.projectConfig, + 'skills' + ) const baseSkillsDir = 
this.joinPath(projectRootDir, PROJECT_SKILLS_DIR) - if (selectedSkills.selectedScope === 'project' && filteredSkills.length > 0) pushSkillDeclarations(baseSkillsDir, 'project', filteredSkills) + if ( + selectedSkills.selectedScope === 'project' + && filteredSkills.length > 0 + ) + { pushSkillDeclarations(baseSkillsDir, 'project', filteredSkills) } - if (selectedMcpSkills.selectedScope === 'project') pushMcpDeclarations(baseSkillsDir, 'project', filteredMcpSkills) + if (selectedMcpSkills.selectedScope === 'project') + { pushMcpDeclarations(baseSkillsDir, 'project', filteredMcpSkills) } } } - if (selectedSkills.selectedScope !== 'global' && selectedMcpSkills.selectedScope !== 'global') return declarations + if ( + selectedSkills.selectedScope !== 'global' + && selectedMcpSkills.selectedScope !== 'global' + ) + { return declarations } const baseSkillsDir = this.joinPath(this.getHomeDir(), PROJECT_SKILLS_DIR) - const promptSourceProjectConfig = this.resolvePromptSourceProjectConfig(ctx) + const promptSourceProjectConfig + = this.resolvePromptSourceProjectConfig(ctx) if (selectedSkills.selectedScope === 'global') { - const filteredSkills = filterByProjectConfig(selectedSkills.items, promptSourceProjectConfig, 'skills') - if (filteredSkills.length > 0) pushSkillDeclarations(baseSkillsDir, 'global', filteredSkills) + const filteredSkills = filterByProjectConfig( + selectedSkills.items, + promptSourceProjectConfig, + 'skills' + ) + if (filteredSkills.length > 0) + { pushSkillDeclarations(baseSkillsDir, 'global', filteredSkills) } } if (selectedMcpSkills.selectedScope !== 'global') return declarations - const filteredMcpSkills = filterByProjectConfig(selectedMcpSkills.items, promptSourceProjectConfig, 'skills') + const filteredMcpSkills = filterByProjectConfig( + selectedMcpSkills.items, + promptSourceProjectConfig, + 'skills' + ) pushMcpDeclarations(baseSkillsDir, 'global', filteredMcpSkills) return declarations } @@ -173,12 +224,22 @@ export class 
GenericSkillsOutputPlugin extends AbstractOutputPlugin { switch (source.kind) { case 'skillMain': { const frontMatterData = this.buildSkillFrontMatter(source.skill) - return this.buildMarkdownContent(source.skill.content as string, frontMatterData, ctx) + return this.buildMarkdownContent( + source.skill.content as string, + frontMatterData, + ctx + ) } - case 'skillMcp': return source.rawContent - case 'skillChildDoc': return source.content - case 'skillResource': return source.encoding === 'base64' ? Buffer.from(source.content, 'base64') : source.content - default: throw new Error(`Unsupported declaration source for ${this.name}`) + case 'skillMcp': + return source.rawContent + case 'skillChildDoc': + return source.content + case 'skillResource': + return source.encoding === 'base64' + ? Buffer.from(source.content, 'base64') + : source.content + default: + throw new Error(`Unsupported declaration source for ${this.name}`) } } } diff --git a/cli/src/plugins/OpencodeCLIOutputPlugin.test.ts b/cli/src/plugins/OpencodeCLIOutputPlugin.test.ts index d610d6c2..ed51fbc8 100644 --- a/cli/src/plugins/OpencodeCLIOutputPlugin.test.ts +++ b/cli/src/plugins/OpencodeCLIOutputPlugin.test.ts @@ -1,10 +1,44 @@ -import type {OutputWriteContext, SubAgentPrompt} from './plugin-core' +import type {OutputCleanContext, OutputWriteContext, SubAgentPrompt} from './plugin-core' import * as fs from 'node:fs' +import * as os from 'node:os' import * as path from 'node:path' import {describe, expect, it} from 'vitest' import {OpencodeCLIOutputPlugin} from './OpencodeCLIOutputPlugin' import {createLogger, FilePathKind, PromptKind} from './plugin-core' +class TestOpencodeCLIOutputPlugin extends OpencodeCLIOutputPlugin { + constructor(private readonly testHomeDir: string) { + super() + } + + protected override getHomeDir(): string { + return this.testHomeDir + } +} + +function createCleanContext(): OutputCleanContext { + return { + logger: createLogger('OpencodeCLIOutputPlugin', 'error'), + fs, + 
path, + glob: {} as never, + dryRun: true, + runtimeTargets: {}, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Relative, + path: '.', + basePath: '.', + getDirectoryName: () => '.', + getAbsolutePath: () => path.resolve('.') + }, + projects: [] + } + } + } as unknown as OutputCleanContext +} + function createSubAgentPrompt(scope: 'project' | 'global'): SubAgentPrompt { return { type: PromptKind.SubAgent, @@ -23,10 +57,11 @@ function createSubAgentPrompt(scope: 'project' | 'global'): SubAgentPrompt { canonicalName: 'ops-reviewer', yamlFrontMatter: { description: 'Reviewer', - scope + scope, + namingCase: 'kebab-case' }, markdownContents: [] - } as SubAgentPrompt + } as unknown as SubAgentPrompt } describe('opencodeCLIOutputPlugin synthetic workspace project output', () => { @@ -39,6 +74,7 @@ describe('opencodeCLIOutputPlugin synthetic workspace project output', () => { path, glob: {} as never, dryRun: true, + runtimeTargets: {}, collectedOutputContext: { workspace: { directory: { @@ -46,20 +82,37 @@ describe('opencodeCLIOutputPlugin synthetic workspace project output', () => { path: workspaceBase, getDirectoryName: () => path.basename(workspaceBase) }, - projects: [{ - name: '__workspace__', - isWorkspaceRootProject: true - }] + projects: [ + { + name: '__workspace__', + isWorkspaceRootProject: true + } + ] }, subAgents: [createSubAgentPrompt('project')] } - } as OutputWriteContext + } as unknown as OutputWriteContext const declarations = await plugin.declareOutputFiles(ctx) - expect(declarations.map(declaration => declaration.path)).toContain( - path.join(workspaceBase, '.opencode', 'agents', 'ops-reviewer.md') - ) + expect(declarations.map(declaration => declaration.path)).toContain(path.join(workspaceBase, '.opencode', 'agents', 'ops-reviewer.md')) expect(declarations.every(declaration => declaration.scope === 'project')).toBe(true) }) }) + +describe('opencodeCLIOutputPlugin cleanup', () => { + it('keeps global opencode.json 
out of cleanup delete targets', async () => { + const tempHomeDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-opencode-cleanup-')) + + try { + const plugin = new TestOpencodeCLIOutputPlugin(tempHomeDir) + const cleanup = await plugin.declareCleanupPaths(createCleanContext()) + const deletePaths = cleanup.delete?.map(target => target.path.replaceAll('\\', '/')) ?? [] + + expect(deletePaths).toContain(path.join(tempHomeDir, '.config', 'opencode', 'AGENTS.md').replaceAll('\\', '/')) + expect(deletePaths).not.toContain(path.join(tempHomeDir, '.config', 'opencode', 'opencode.json').replaceAll('\\', '/')) + } finally { + fs.rmSync(tempHomeDir, {recursive: true, force: true}) + } + }) +}) diff --git a/cli/src/plugins/OpencodeCLIOutputPlugin.ts b/cli/src/plugins/OpencodeCLIOutputPlugin.ts index 0d66f20e..72f6564a 100644 --- a/cli/src/plugins/OpencodeCLIOutputPlugin.ts +++ b/cli/src/plugins/OpencodeCLIOutputPlugin.ts @@ -1,14 +1,7 @@ -import type {CommandPrompt, OutputFileDeclaration, OutputWriteContext, SkillPrompt, SubAgentPrompt} from './plugin-core' +import type {CommandPrompt, OutputFileDeclaration, OutputWriteContext, RulePrompt, SkillPrompt, SubAgentPrompt} from './plugin-core' import {Buffer} from 'node:buffer' import * as path from 'node:path' -import { - AbstractOutputPlugin, - collectMcpServersFromSkills, - filterByProjectConfig, - PLUGIN_NAMES, - transformMcpConfigForOpencode, - transformMcpServerMap -} from './plugin-core' +import {AbstractOutputPlugin, applySubSeriesGlobPrefix, filterByProjectConfig, PLUGIN_NAMES} from './plugin-core' const GLOBAL_MEMORY_FILE = 'AGENTS.md' const GLOBAL_CONFIG_DIR = '.config/opencode' @@ -18,6 +11,7 @@ const PROJECT_RULES_DIR = '.opencode' const COMMANDS_SUBDIR = 'commands' const AGENTS_SUBDIR = 'agents' const SKILLS_SUBDIR = 'skills' +const RULES_SUBDIR = 'rules' type OpencodeOutputSource = | {readonly kind: 'globalMemory', readonly content: string} @@ -29,6 +23,7 @@ type OpencodeOutputSource | {readonly kind: 
'skillReference', readonly content: string} | {readonly kind: 'skillResource', readonly content: string, readonly encoding: 'text' | 'base64'} | {readonly kind: 'mcpConfig', readonly mcpServers: Record>} + | {readonly kind: 'rule', readonly rule: RulePrompt} function transformOpencodeCommandFrontMatter( _cmd: CommandPrompt, @@ -72,19 +67,24 @@ export class OpencodeCLIOutputPlugin extends AbstractOutputPlugin { skills: { subDir: SKILLS_SUBDIR }, + rules: { + subDir: RULES_SUBDIR, + prefix: 'rule', + sourceScopes: ['project', 'global'] + }, cleanup: { delete: { project: { files: [GLOBAL_MEMORY_FILE, '.opencode/opencode.json'], - dirs: ['.opencode/commands', '.opencode/agents', '.opencode/skills'] + dirs: ['.opencode/commands', '.opencode/agents', '.opencode/skills', '.opencode/rules'] }, global: { - files: ['.config/opencode/AGENTS.md', '.config/opencode/opencode.json'], - dirs: ['.config/opencode/commands', '.config/opencode/agents', '.config/opencode/skills'] + files: ['.config/opencode/AGENTS.md'], + dirs: ['.config/opencode/commands', '.config/opencode/agents', '.config/opencode/skills', '.config/opencode/rules'] }, xdgConfig: { - files: ['opencode/AGENTS.md', 'opencode/opencode.json'], - dirs: ['opencode/commands', 'opencode/agents', 'opencode/skills'] + files: ['opencode/AGENTS.md'], + dirs: ['opencode/commands', 'opencode/agents', 'opencode/skills', 'opencode/rules'] } } }, @@ -94,6 +94,10 @@ export class OpencodeCLIOutputPlugin extends AbstractOutputPlugin { scopes: ['project', 'global'], singleScope: false }, + rules: { + scopes: ['project', 'global'], + singleScope: false + }, commands: { scopes: ['project', 'global'], singleScope: true @@ -116,28 +120,47 @@ export class OpencodeCLIOutputPlugin extends AbstractOutputPlugin { override async declareOutputFiles(ctx: OutputWriteContext): Promise { const declarations: OutputFileDeclaration[] = [] - const {globalMemory, commands, subAgents, skills} = ctx.collectedOutputContext + const {globalMemory, commands, 
subAgents, skills, rules} = ctx.collectedOutputContext const globalDir = this.getGlobalConfigDir() const activePromptScopes = new Set(this.selectPromptScopes(ctx, ['project', 'global'])) const promptProjects = this.getProjectPromptOutputProjects(ctx) const promptSourceProjectConfig = this.resolvePromptSourceProjectConfig(ctx) - const selectedCommands = commands != null - ? this.selectSingleScopeItems(commands, this.commandsConfig.sourceScopes, command => this.resolveCommandSourceScope(command), this.getTopicScopeOverride(ctx, 'commands')) - : {items: [] as readonly CommandPrompt[]} - const selectedSubAgents = subAgents != null - ? this.selectSingleScopeItems(subAgents, this.subAgentsConfig.sourceScopes, subAgent => this.resolveSubAgentSourceScope(subAgent), this.getTopicScopeOverride(ctx, 'subagents')) - : {items: [] as readonly SubAgentPrompt[]} - const selectedSkills = skills != null - ? this.selectSingleScopeItems(skills, this.skillsConfig.sourceScopes, skill => this.resolveSkillSourceScope(skill), this.getTopicScopeOverride(ctx, 'skills')) - : {items: [] as readonly SkillPrompt[]} - const selectedMcpSkills = skills != null - ? this.selectSingleScopeItems( - skills, - this.skillsConfig.sourceScopes, - skill => this.resolveSkillSourceScope(skill), - this.getTopicScopeOverride(ctx, 'mcp') ?? this.getTopicScopeOverride(ctx, 'skills') - ) - : {items: [] as readonly SkillPrompt[]} + const selectedCommands + = commands != null + ? this.selectSingleScopeItems( + commands, + this.commandsConfig.sourceScopes, + command => this.resolveCommandSourceScope(command), + this.getTopicScopeOverride(ctx, 'commands') + ) + : {items: [] as readonly CommandPrompt[]} + const selectedSubAgents + = subAgents != null + ? 
this.selectSingleScopeItems( + subAgents, + this.subAgentsConfig.sourceScopes, + subAgent => this.resolveSubAgentSourceScope(subAgent), + this.getTopicScopeOverride(ctx, 'subagents') + ) + : {items: [] as readonly SubAgentPrompt[]} + const selectedSkills + = skills != null + ? this.selectSingleScopeItems( + skills, + this.skillsConfig.sourceScopes, + skill => this.resolveSkillSourceScope(skill), + this.getTopicScopeOverride(ctx, 'skills') + ) + : {items: [] as readonly SkillPrompt[]} + const selectedMcpSkills + = skills != null + ? this.selectSingleScopeItems( + skills, + this.skillsConfig.sourceScopes, + skill => this.resolveSkillSourceScope(skill), + this.getTopicScopeOverride(ctx, 'mcp') ?? this.getTopicScopeOverride(ctx, 'skills') + ) + : {items: [] as readonly SkillPrompt[]} if (globalMemory != null && activePromptScopes.has('global')) { declarations.push({ @@ -150,31 +173,7 @@ export class OpencodeCLIOutputPlugin extends AbstractOutputPlugin { }) } - const pushMcpDeclaration = ( - basePath: string, - scope: 'project' | 'global', - filteredSkills: readonly SkillPrompt[] - ): void => { - if (filteredSkills.length === 0) return - - const servers = collectMcpServersFromSkills(filteredSkills, this.log) - if (servers.size === 0) return - - declarations.push({ - path: path.join(basePath, OPENCODE_CONFIG_FILE), - scope, - source: { - kind: 'mcpConfig', - mcpServers: transformMcpServerMap(servers, transformMcpConfigForOpencode) - } satisfies OpencodeOutputSource - }) - } - - const pushSkillDeclarations = ( - basePath: string, - scope: 'project' | 'global', - filteredSkills: readonly SkillPrompt[] - ): void => { + const pushSkillDeclarations = (basePath: string, scope: 'project' | 'global', filteredSkills: readonly SkillPrompt[]): void => { for (const skill of filteredSkills) { const normalizedSkillName = this.validateAndNormalizeSkillName(this.getSkillName(skill)) const skillDir = path.join(basePath, SKILLS_SUBDIR, normalizedSkillName) @@ -218,6 +217,18 @@ export 
class OpencodeCLIOutputPlugin extends AbstractOutputPlugin { } } + const pushMcpDeclaration = (basePath: string, scope: 'project' | 'global', _filteredSkills: readonly SkillPrompt[]): void => { + void _filteredSkills + declarations.push({ + path: path.join(basePath, OPENCODE_CONFIG_FILE), + scope, + source: { + kind: 'mcpConfig', + mcpServers: {} + } satisfies OpencodeOutputSource + }) + } + const transformOptions = this.getTransformOptionsFromContext(ctx, {includeSeriesPrefix: true}) for (const project of promptProjects) { const projectRootDir = this.resolveProjectRootDir(ctx, project) @@ -284,8 +295,10 @@ export class OpencodeCLIOutputPlugin extends AbstractOutputPlugin { const filteredSkills = filterByProjectConfig(selectedSkills.items, project.projectConfig, 'skills') if (selectedSkills.selectedScope === 'project') pushSkillDeclarations(basePath, 'project', filteredSkills) - const filteredMcpSkills = filterByProjectConfig(selectedMcpSkills.items, project.projectConfig, 'skills') - if (selectedMcpSkills.selectedScope === 'project') pushMcpDeclaration(basePath, 'project', filteredMcpSkills) + if (selectedMcpSkills.selectedScope === 'project') { + const filteredMcpSkills = filterByProjectConfig(selectedMcpSkills.items, project.projectConfig, 'skills') + pushMcpDeclaration(basePath, 'project', filteredMcpSkills) + } } } @@ -316,24 +329,64 @@ export class OpencodeCLIOutputPlugin extends AbstractOutputPlugin { pushSkillDeclarations(globalDir, 'global', filteredSkills) } - if (selectedMcpSkills.selectedScope !== 'global') return declarations + if (selectedMcpSkills.selectedScope === 'global') { + const filteredMcpSkills = filterByProjectConfig(selectedMcpSkills.items, promptSourceProjectConfig, 'skills') + pushMcpDeclaration(globalDir, 'global', filteredMcpSkills) + } + + // Keep opencode.json managed so the generated config can preserve user fields + // while normalizing the MCP section to an empty object. 
+ + if (rules == null || rules.length === 0) return declarations - const filteredMcpSkills = filterByProjectConfig(selectedMcpSkills.items, promptSourceProjectConfig, 'skills') - pushMcpDeclaration(globalDir, 'global', filteredMcpSkills) + const activeRuleScopes = this.selectRuleScopes(ctx, rules) + for (const ruleScope of activeRuleScopes) { + if (ruleScope === 'global') { + const globalRules = rules.filter(rule => this.normalizeSourceScope(this.normalizeRuleScope(rule)) === 'global') + for (const rule of globalRules) { + declarations.push({ + path: path.join(globalDir, RULES_SUBDIR, this.buildRuleFileName(rule)), + scope: 'global', + source: {kind: 'rule', rule} satisfies OpencodeOutputSource + }) + } + } else if (ruleScope === 'project') { + for (const project of this.getProjectOutputProjects(ctx)) { + const projectRootDir = this.resolveProjectRootDir(ctx, project) + if (projectRootDir == null) continue + const basePath = path.join(projectRootDir, PROJECT_RULES_DIR) + + const projectRules = applySubSeriesGlobPrefix( + filterByProjectConfig( + rules.filter(rule => this.normalizeSourceScope(this.normalizeRuleScope(rule)) === 'project'), + project.projectConfig, + 'rules' + ), + project.projectConfig + ) + for (const rule of projectRules) { + declarations.push({ + path: path.join(basePath, RULES_SUBDIR, this.buildRuleFileName(rule)), + scope: 'project', + source: {kind: 'rule', rule} satisfies OpencodeOutputSource + }) + } + } + } + } return declarations } - override async convertContent( - declaration: OutputFileDeclaration, - ctx: OutputWriteContext - ): Promise { + override async convertContent(declaration: OutputFileDeclaration, ctx: OutputWriteContext): Promise { const source = declaration.source as OpencodeOutputSource switch (source.kind) { case 'globalMemory': case 'projectRootMemory': case 'projectChildMemory': - case 'skillReference': return source.content - case 'command': return this.buildCommandContent(source.command, ctx) + case 'skillReference': + 
return source.content + case 'command': + return this.buildCommandContent(source.command, ctx) case 'subAgent': { const frontMatter = this.buildOpencodeAgentFrontMatter(source.agent) return this.buildMarkdownContent(source.agent.content, frontMatter, ctx) @@ -342,14 +395,22 @@ export class OpencodeCLIOutputPlugin extends AbstractOutputPlugin { const frontMatter = this.buildOpencodeSkillFrontMatter(source.skill, source.normalizedSkillName) return this.buildMarkdownContent(source.skill.content as string, frontMatter, ctx) } - case 'skillResource': return source.encoding === 'base64' ? Buffer.from(source.content, 'base64') : source.content + case 'skillResource': + return source.encoding === 'base64' ? Buffer.from(source.content, 'base64') : source.content case 'mcpConfig': - return JSON.stringify({ - $schema: 'https://opencode.ai/config.json', - plugin: [OPENCODE_RULES_PLUGIN_NAME], - mcp: source.mcpServers - }, null, 2) - default: throw new Error(`Unsupported declaration source for ${this.name}`) + return JSON.stringify( + { + $schema: 'https://opencode.ai/config.json', + plugin: [OPENCODE_RULES_PLUGIN_NAME], + mcp: {} + }, + null, + 2 + ) + case 'rule': + return this.buildRuleContent(source.rule, ctx) + default: + throw new Error(`Unsupported declaration source for ${this.name}`) } } @@ -400,7 +461,18 @@ export class OpencodeCLIOutputPlugin extends AbstractOutputPlugin { if (source?.[field] != null) metadata[field] = source[field] } - const reservedFields = new Set(['name', 'description', 'license', 'compatibility', 'namingCase', 'allowTools', 'keywords', 'displayName', 'author', 'version']) + const reservedFields = new Set([ + 'name', + 'description', + 'license', + 'compatibility', + 'namingCase', + 'allowTools', + 'keywords', + 'displayName', + 'author', + 'version' + ]) for (const [key, value] of Object.entries(source ?? 
{})) { if (!reservedFields.has(key)) metadata[key] = value } diff --git a/cli/src/plugins/PromptMarkdownCleanup.test.ts b/cli/src/plugins/PromptMarkdownCleanup.test.ts index dac280ea..032e5797 100644 --- a/cli/src/plugins/PromptMarkdownCleanup.test.ts +++ b/cli/src/plugins/PromptMarkdownCleanup.test.ts @@ -1,4 +1,9 @@ -import type {OutputCleanContext, OutputPlugin, ProjectChildrenMemoryPrompt, ProjectRootMemoryPrompt} from './plugin-core' +import type { + OutputCleanContext, + OutputPlugin, + ProjectChildrenMemoryPrompt, + ProjectRootMemoryPrompt +} from './plugin-core' import * as fs from 'node:fs' import * as os from 'node:os' import * as path from 'node:path' @@ -117,7 +122,14 @@ function createCleanContext(workspaceDir: string): OutputCleanContext { getAbsolutePath: () => path.join(workspaceDir, 'aindex') }, rootMemoryPrompt: createRootPrompt('prompt-source root'), - childMemoryPrompts: [createChildPrompt(workspaceDir, 'aindex', 'commands', 'prompt-source child')] + childMemoryPrompts: [ + createChildPrompt( + workspaceDir, + 'aindex', + 'commands', + 'prompt-source child' + ) + ] }, { name: 'project-a', @@ -129,7 +141,14 @@ function createCleanContext(workspaceDir: string): OutputCleanContext { getAbsolutePath: () => path.join(workspaceDir, 'project-a') }, rootMemoryPrompt: createRootPrompt('project root'), - childMemoryPrompts: [createChildPrompt(workspaceDir, 'project-a', 'commands', 'project child')] + childMemoryPrompts: [ + createChildPrompt( + workspaceDir, + 'project-a', + 'commands', + 'project child' + ) + ] } ] } @@ -139,14 +158,31 @@ function createCleanContext(workspaceDir: string): OutputCleanContext { describe.each(TEST_CASES)('$name cleanup', ({fileName, createPlugin}) => { it('cleans workspace and non-prompt project markdown outputs without touching prompt-source paths', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), `tnmsc-${fileName.toLowerCase()}-cleanup-`)) + const tempDir = fs.mkdtempSync( + path.join(os.tmpdir(), 
`tnmsc-${fileName.toLowerCase()}-cleanup-`) + ) const workspaceDir = path.join(tempDir, 'workspace') const workspaceFile = path.join(workspaceDir, fileName) const promptSourceRootFile = path.join(workspaceDir, 'aindex', fileName) - const promptSourceChildFile = path.join(workspaceDir, 'aindex', 'commands', fileName) + const promptSourceChildFile = path.join( + workspaceDir, + 'aindex', + 'commands', + fileName + ) const projectRootFile = path.join(workspaceDir, 'project-a', fileName) - const projectChildFile = path.join(workspaceDir, 'project-a', 'commands', fileName) - const manualProjectChildFile = path.join(workspaceDir, 'project-a', 'docs', fileName) + const projectChildFile = path.join( + workspaceDir, + 'project-a', + 'commands', + fileName + ) + const manualProjectChildFile = path.join( + workspaceDir, + 'project-a', + 'docs', + fileName + ) fs.mkdirSync(path.dirname(promptSourceChildFile), {recursive: true}) fs.mkdirSync(path.dirname(manualProjectChildFile), {recursive: true}) @@ -159,19 +195,28 @@ describe.each(TEST_CASES)('$name cleanup', ({fileName, createPlugin}) => { fs.writeFileSync(manualProjectChildFile, '# manual child', 'utf8') try { - const result = await collectDeletionTargets([createPlugin()], createCleanContext(workspaceDir)) - const normalizedFilesToDelete = result.filesToDelete.map(target => target.replaceAll('\\', '/')) + const result = await collectDeletionTargets( + [createPlugin()], + createCleanContext(workspaceDir) + ) + const normalizedFilesToDelete = result.filesToDelete.map(target => + target.replaceAll('\\', '/')) - expect(normalizedFilesToDelete).toEqual(expect.arrayContaining([ - workspaceFile.replaceAll('\\', '/'), - projectRootFile.replaceAll('\\', '/'), - projectChildFile.replaceAll('\\', '/') - ])) - expect(normalizedFilesToDelete).not.toContain(manualProjectChildFile.replaceAll('\\', '/')) - expect(normalizedFilesToDelete).not.toContain(promptSourceRootFile.replaceAll('\\', '/')) - 
expect(normalizedFilesToDelete).not.toContain(promptSourceChildFile.replaceAll('\\', '/')) - } - finally { + expect(normalizedFilesToDelete).toEqual( + expect.arrayContaining([ + workspaceFile.replaceAll('\\', '/'), + projectRootFile.replaceAll('\\', '/'), + projectChildFile.replaceAll('\\', '/'), + manualProjectChildFile.replaceAll('\\', '/') + ]) + ) + expect(normalizedFilesToDelete).not.toContain( + promptSourceRootFile.replaceAll('\\', '/') + ) + expect(normalizedFilesToDelete).not.toContain( + promptSourceChildFile.replaceAll('\\', '/') + ) + } finally { fs.rmSync(tempDir, {recursive: true, force: true}) } }) @@ -179,7 +224,9 @@ describe.each(TEST_CASES)('$name cleanup', ({fileName, createPlugin}) => { describe('claudeCodeCLIOutputPlugin cleanup', () => { it('keeps project-scope .claude cleanup directories registered', async () => { - const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-claude-cleanup-')) + const tempDir = fs.mkdtempSync( + path.join(os.tmpdir(), 'tnmsc-claude-cleanup-') + ) const workspaceDir = path.join(tempDir, 'workspace') const projectClaudeDirs = [ path.join(workspaceDir, 'project-a', '.claude', 'rules'), @@ -193,14 +240,19 @@ describe('claudeCodeCLIOutputPlugin cleanup', () => { } try { - const result = await collectDeletionTargets([new ClaudeCodeCLIOutputPlugin()], createCleanContext(workspaceDir)) - const normalizedDirsToDelete = result.dirsToDelete.map(target => target.replaceAll('\\', '/')) + const result = await collectDeletionTargets( + [new ClaudeCodeCLIOutputPlugin()], + createCleanContext(workspaceDir) + ) + const normalizedDirsToDelete = result.dirsToDelete.map(target => + target.replaceAll('\\', '/')) - expect(normalizedDirsToDelete).toEqual(expect.arrayContaining( - projectClaudeDirs.map(target => target.replaceAll('\\', '/')) - )) - } - finally { + expect(normalizedDirsToDelete).toEqual( + expect.arrayContaining( + projectClaudeDirs.map(target => target.replaceAll('\\', '/')) + ) + ) + } finally { fs.rmSync(tempDir, 
{recursive: true, force: true}) } }) diff --git a/cli/src/plugins/plugin-core/AbstractOutputPlugin.ts b/cli/src/plugins/plugin-core/AbstractOutputPlugin.ts index 652f928b..33874f07 100644 --- a/cli/src/plugins/plugin-core/AbstractOutputPlugin.ts +++ b/cli/src/plugins/plugin-core/AbstractOutputPlugin.ts @@ -1,7 +1,34 @@ import type {BuildPromptTomlArtifactOptions} from '@truenine/md-compiler' import type {ToolPresetName} from './GlobalScopeCollector' import type {RegistryWriter} from './RegistryWriter' -import type {CommandPrompt, CommandSeriesPluginOverride, ILogger, OutputCleanContext, OutputCleanupDeclarations, OutputCleanupPathDeclaration, OutputCleanupScope, OutputDeclarationScope, OutputFileDeclaration, OutputPlugin, OutputPluginCapabilities, OutputPluginContext, OutputScopeSelection, OutputScopeTopic, OutputTopicCapability, OutputWriteContext, Path, Project, ProjectConfig, RegistryData, RegistryOperationResult, RulePrompt, RuleScope, SkillPrompt, SubAgentPrompt, WslMirrorFileDeclaration} from './types' +import type { + CommandPrompt, + CommandSeriesPluginOverride, + ILogger, + OutputCleanContext, + OutputCleanupDeclarations, + OutputCleanupPathDeclaration, + OutputCleanupScope, + OutputDeclarationScope, + OutputFileDeclaration, + OutputPlugin, + OutputPluginCapabilities, + OutputPluginContext, + OutputScopeSelection, + OutputScopeTopic, + OutputTopicCapability, + OutputWriteContext, + Path, + Project, + ProjectConfig, + RegistryData, + RegistryOperationResult, + RulePrompt, + RuleScope, + SkillPrompt, + SubAgentPrompt, + WslMirrorFileDeclaration +} from './types' import {Buffer} from 'node:buffer' import * as path from 'node:path' @@ -12,10 +39,7 @@ import {buildConfigDiagnostic, diagnosticLines} from '@/diagnostics' import {getEffectiveHomeDir} from '@/runtime-environment' import {AbstractPlugin} from './AbstractPlugin' import {FilePathKind, PluginKind} from './enums' -import { - applySubSeriesGlobPrefix, - filterByProjectConfig -} from './filters' +import 
{applySubSeriesGlobPrefix, filterByProjectConfig} from './filters' import {GlobalScopeCollector} from './GlobalScopeCollector' import {compileRawPromptArtifact} from './PromptArtifactCache' import {resolveSkillName, resolveSubAgentCanonicalName} from './PromptIdentity' @@ -74,10 +98,13 @@ export interface CommandOutputConfig { /** Commands subdirectory, default 'commands' */ readonly subDir?: string /** Custom command frontmatter transformer */ - readonly transformFrontMatter?: (cmd: CommandPrompt, context: { - readonly sourceFrontMatter?: Record - readonly isRecompiled: boolean - }) => Record + readonly transformFrontMatter?: ( + cmd: CommandPrompt, + context: { + readonly sourceFrontMatter?: Record + readonly isRecompiled: boolean + } + ) => Record /** Allowed command source scopes, default ['project', 'global'] */ readonly sourceScopes?: readonly OutputDeclarationScope[] /** Optional source-scope remap before output selection */ @@ -114,9 +141,12 @@ export interface SubAgentsOutputConfig extends ScopedSourceConfig { /** Preferred root-level field order for emitted artifact */ readonly fieldOrder?: readonly string[] /** Optional frontmatter transformer */ - readonly transformFrontMatter?: (subAgent: SubAgentPrompt, context: { - readonly sourceFrontMatter?: Record - }) => Record + readonly transformFrontMatter?: ( + subAgent: SubAgentPrompt, + context: { + readonly sourceFrontMatter?: Record + } + ) => Record } /** @@ -227,7 +257,11 @@ type DeclarativeOutputSource | {readonly kind: 'subAgent', readonly subAgent: SubAgentPrompt} | {readonly kind: 'skillMain', readonly skill: SkillPrompt} | {readonly kind: 'skillReference', readonly content: string} - | {readonly kind: 'skillResource', readonly content: string, readonly encoding: 'text' | 'base64'} + | { + readonly kind: 'skillResource' + readonly content: string + readonly encoding: 'text' | 'base64' + } | {readonly kind: 'rule', readonly rule: RulePrompt} | {readonly kind: 'ignoreFile', readonly content: string} 
@@ -246,10 +280,13 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out protected readonly commandsConfig: { readonly subDir: string - readonly transformFrontMatter?: (cmd: CommandPrompt, context: { - readonly sourceFrontMatter?: Record - readonly isRecompiled: boolean - }) => Record + readonly transformFrontMatter?: ( + cmd: CommandPrompt, + context: { + readonly sourceFrontMatter?: Record + readonly isRecompiled: boolean + } + ) => Record readonly sourceScopes: readonly OutputDeclarationScope[] readonly scopeRemap?: Partial> } @@ -268,9 +305,12 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out readonly extraFields?: Readonly> readonly fieldOrder?: readonly string[] readonly scopeRemap?: Partial> - readonly transformFrontMatter?: (subAgent: SubAgentPrompt, context: { - readonly sourceFrontMatter?: Record - }) => Record + readonly transformFrontMatter?: ( + subAgent: SubAgentPrompt, + context: { + readonly sourceFrontMatter?: Record + } + ) => Record } protected readonly commandOutputEnabled: boolean @@ -326,25 +366,21 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out this.wslMirrorPaths = options?.wslMirrors ?? [] this.supportsBlankLineAfterFrontMatter = options?.supportsBlankLineAfterFrontMatter ?? true - this.outputCapabilities = options?.capabilities != null - ? this.normalizeCapabilities(options.capabilities) - : this.buildInferredCapabilities() + this.outputCapabilities = options?.capabilities != null ? this.normalizeCapabilities(options.capabilities) : this.buildInferredCapabilities() } - private createCommandsConfig( - config?: CommandOutputConfig - ): AbstractOutputPlugin['commandsConfig'] { + private createCommandsConfig(config?: CommandOutputConfig): AbstractOutputPlugin['commandsConfig'] { return { subDir: config?.subDir ?? 'commands', sourceScopes: config?.sourceScopes ?? 
['project', 'global'], ...config?.scopeRemap != null && {scopeRemap: config.scopeRemap}, - ...config?.transformFrontMatter != null && {transformFrontMatter: config.transformFrontMatter} + ...config?.transformFrontMatter != null && { + transformFrontMatter: config.transformFrontMatter + } } } - private createSubAgentsConfig( - config?: SubAgentsOutputConfig - ): AbstractOutputPlugin['subAgentsConfig'] { + private createSubAgentsConfig(config?: SubAgentsOutputConfig): AbstractOutputPlugin['subAgentsConfig'] { return { subDir: config?.subDir ?? 'agents', sourceScopes: config?.sourceScopes ?? ['project', 'global'], @@ -353,19 +389,25 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out ext: config?.ext ?? '.md', artifactFormat: config?.artifactFormat ?? 'markdown', fileNameSource: config?.fileNameSource ?? 'derivedPath', - ...config?.bodyFieldName != null && {bodyFieldName: config.bodyFieldName}, - ...config?.fieldNameMap != null && {fieldNameMap: config.fieldNameMap}, - ...config?.excludedFrontMatterFields != null && {excludedFrontMatterFields: config.excludedFrontMatterFields}, + ...config?.bodyFieldName != null && { + bodyFieldName: config.bodyFieldName + }, + ...config?.fieldNameMap != null && { + fieldNameMap: config.fieldNameMap + }, + ...config?.excludedFrontMatterFields != null && { + excludedFrontMatterFields: config.excludedFrontMatterFields + }, ...config?.extraFields != null && {extraFields: config.extraFields}, ...config?.fieldOrder != null && {fieldOrder: config.fieldOrder}, ...config?.scopeRemap != null && {scopeRemap: config.scopeRemap}, - ...config?.transformFrontMatter != null && {transformFrontMatter: config.transformFrontMatter} + ...config?.transformFrontMatter != null && { + transformFrontMatter: config.transformFrontMatter + } } } - private createSkillsConfig( - config?: SkillsOutputConfig - ): AbstractOutputPlugin['skillsConfig'] { + private createSkillsConfig(config?: SkillsOutputConfig): 
AbstractOutputPlugin['skillsConfig'] { return { subDir: config?.subDir ?? 'skills', sourceScopes: config?.sourceScopes ?? ['project', 'global'], @@ -414,9 +456,7 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out return capabilities } - private normalizeCapabilities( - capabilities: OutputPluginCapabilities - ): OutputPluginCapabilities { + private normalizeCapabilities(capabilities: OutputPluginCapabilities): OutputPluginCapabilities { const normalizedCapabilities: OutputPluginCapabilities = {} for (const topic of OUTPUT_SCOPE_TOPICS) { const capability = capabilities[topic] @@ -428,9 +468,7 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out return normalizedCapabilities } - private normalizeCapability( - capability: OutputTopicCapability - ): OutputTopicCapability | undefined { + private normalizeCapability(capability: OutputTopicCapability): OutputTopicCapability | undefined { const uniqueScopes: OutputDeclarationScope[] = [] for (const scope of capability.scopes) { if (!uniqueScopes.includes(scope)) uniqueScopes.push(scope) @@ -473,21 +511,17 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out return ctx.collectedOutputContext.workspace.projects.find(project => project.isWorkspaceRootProject === true) } - protected resolveProjectRootDir( - ctx: OutputPluginContext | OutputWriteContext, - project: Project - ): string | undefined { - if (project.isWorkspaceRootProject === true) return this.resolveDirectoryPath(ctx.collectedOutputContext.workspace.directory) + protected resolveProjectRootDir(ctx: OutputPluginContext | OutputWriteContext, project: Project): string | undefined { + if (project.isWorkspaceRootProject === true) { + return this.resolveDirectoryPath(ctx.collectedOutputContext.workspace.directory) + } const projectDir = project.dirFromWorkspacePath if (projectDir == null) return void 0 return this.resolveDirectoryPath(projectDir) } - protected 
resolveProjectConfigDir( - ctx: OutputPluginContext | OutputWriteContext, - project: Project - ): string | undefined { + protected resolveProjectConfigDir(ctx: OutputPluginContext | OutputWriteContext, project: Project): string | undefined { const projectRootDir = this.resolveProjectRootDir(ctx, project) if (projectRootDir == null) return void 0 if (this.globalConfigDir.length === 0) return projectRootDir @@ -512,7 +546,9 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out protected resolveDirectoryPath(targetPath: Path): string { if (targetPath.pathKind === FilePathKind.Absolute) return targetPath.path - if ('basePath' in targetPath) return path.resolve(targetPath.basePath as string, targetPath.path) + if ('basePath' in targetPath) { + return path.resolve(targetPath.basePath as string, targetPath.path) + } return path.resolve(process.cwd(), targetPath.path) } @@ -521,11 +557,7 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out return path.join(workspaceDir, this.globalConfigDir) } - protected createRelativePath( - pathStr: string, - basePath: string, - dirNameFn: () => string - ): string { + protected createRelativePath(pathStr: string, basePath: string, dirNameFn: () => string): string { void dirNameFn return path.join(basePath, pathStr) } @@ -540,7 +572,9 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out protected getXdgConfigHomeDir(): string { const xdgConfigHome = process.env['XDG_CONFIG_HOME'] - if (typeof xdgConfigHome === 'string' && xdgConfigHome.trim().length > 0) return xdgConfigHome + if (typeof xdgConfigHome === 'string' && xdgConfigHome.trim().length > 0) { + return xdgConfigHome + } return path.join(this.getHomeDir(), '.config') } @@ -560,19 +594,13 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out return path.dirname(p) } - protected buildProjectPromptCleanupTargets( - ctx: OutputCleanContext, - fileName: string = 
this.outputFileName - ): readonly OutputCleanupPathDeclaration[] { + protected buildProjectPromptCleanupTargets(ctx: OutputCleanContext, fileName: string = this.outputFileName): readonly OutputCleanupPathDeclaration[] { if (fileName.length === 0) return [] const declarations: OutputCleanupPathDeclaration[] = [] const seenPaths = new Set() - const pushCleanupFile = ( - targetPath: string, - label: string - ): void => { + const pushCleanupFile = (targetPath: string, label: string): void => { if (seenPaths.has(targetPath)) return seenPaths.add(targetPath) declarations.push({ @@ -587,14 +615,20 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out const projectRootDir = this.resolveProjectRootDir(ctx, project) if (projectRootDir == null) continue + // Add glob pattern to match all files with the given name in the project directory + // This ensures files in subdirectories not explicitly tracked as childMemoryPrompts are also cleaned up + declarations.push({ + path: this.resolvePath(projectRootDir, '**', fileName), + kind: 'glob', + scope: 'project', + label: 'delete.project.glob' + }) + pushCleanupFile(this.resolvePath(projectRootDir, fileName), 'delete.project') if (project.childMemoryPrompts == null) continue for (const child of project.childMemoryPrompts) { - pushCleanupFile( - this.resolveFullPath(child.dir, fileName), - 'delete.project.child' - ) + pushCleanupFile(this.resolveFullPath(child.dir, fileName), 'delete.project.child') } } @@ -610,10 +644,7 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out return this.indexignore } - private resolveCleanupScopeBasePaths( - scope: OutputCleanupScope, - ctx: OutputCleanContext - ): readonly string[] { + private resolveCleanupScopeBasePaths(scope: OutputCleanupScope, ctx: OutputCleanContext): readonly string[] { if (scope === 'global') return [this.getHomeDir()] if (scope === 'xdgConfig') return [this.getXdgConfigHomeDir()] @@ -629,7 +660,9 @@ export abstract 
class AbstractOutputPlugin extends AbstractPlugin implements Out private resolveCleanupDeclaredPath(basePath: string, declaredPath: string): string { if (path.isAbsolute(declaredPath)) return path.resolve(declaredPath) if (declaredPath === '~') return this.getHomeDir() - if (declaredPath.startsWith('~/') || declaredPath.startsWith('~\\')) return path.resolve(this.getHomeDir(), declaredPath.slice(2)) + if (declaredPath.startsWith('~/') || declaredPath.startsWith('~\\')) { + return path.resolve(this.getHomeDir(), declaredPath.slice(2)) + } return path.resolve(basePath, declaredPath) } @@ -647,19 +680,13 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out const declarations: OutputCleanupPathDeclaration[] = [] const scopes: readonly OutputCleanupScope[] = ['project', 'global', 'xdgConfig'] - const pushTargets = ( - scope: OutputCleanupScope, - targetKind: 'file' | 'directory' | 'glob', - entries: readonly string[] | undefined - ): void => { + const pushTargets = (scope: OutputCleanupScope, targetKind: 'file' | 'directory' | 'glob', entries: readonly string[] | undefined): void => { if (entries == null || entries.length === 0) return const basePaths = this.resolveCleanupScopeBasePaths(scope, ctx) for (const entry of entries) { for (const basePath of basePaths) { - const resolved = path.isAbsolute(entry) - ? path.resolve(entry) - : this.resolveCleanupDeclaredPath(basePath, entry) + const resolved = path.isAbsolute(entry) ? path.resolve(entry) : this.resolveCleanupDeclaredPath(basePath, entry) declarations.push({ path: targetKind === 'glob' ? this.normalizeGlobPattern(resolved) : resolved, @@ -687,23 +714,16 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out return ctx?.pluginOptions?.frontMatter?.blankLineAfter ?? 
true } - protected buildMarkdownContent( - content: string, - frontMatter?: Record, - ctx?: OutputPluginContext - ): string { + protected buildMarkdownContent(content: string, frontMatter?: Record, ctx?: OutputPluginContext): string { return buildMarkdownWithFrontMatter(frontMatter, content, { blankLineAfter: this.resolveFrontMatterBlankLineAfter(ctx) }) } - protected buildMarkdownContentWithRaw( - content: string, - frontMatter?: Record, - rawFrontMatter?: string, - ctx?: OutputPluginContext - ): string { - if (frontMatter != null && Object.keys(frontMatter).length > 0) return this.buildMarkdownContent(content, frontMatter, ctx) // If we have parsed front matter, use it + protected buildMarkdownContentWithRaw(content: string, frontMatter?: Record, rawFrontMatter?: string, ctx?: OutputPluginContext): string { + if (frontMatter != null && Object.keys(frontMatter).length > 0) { + return this.buildMarkdownContent(content, frontMatter, ctx) + } // If we have parsed front matter, use it if (rawFrontMatter != null && rawFrontMatter.length > 0) { return buildMarkdownWithRawFrontMatter(rawFrontMatter, content, { @@ -722,22 +742,18 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out return ctx.collectedOutputContext.globalMemory?.content as string | undefined } - protected combineGlobalWithContent( - globalContent: string | undefined, - projectContent: string, - options?: CombineOptions - ): string { - const { - separator = '\n\n', - skipIfEmpty = true, - position = 'before' - } = options ?? {} + protected combineGlobalWithContent(globalContent: string | undefined, projectContent: string, options?: CombineOptions): string { + const {separator = '\n\n', skipIfEmpty = true, position = 'before'} = options ?? 
{} - if (skipIfEmpty && (globalContent == null || globalContent.trim().length === 0)) return projectContent // Skip if global content is undefined/null or empty/whitespace when skipIfEmpty is true + if (skipIfEmpty && (globalContent == null || globalContent.trim().length === 0)) { + return projectContent + } // Skip if global content is undefined/null or empty/whitespace when skipIfEmpty is true const effectiveGlobalContent = globalContent ?? '' // If global content is null/undefined but skipIfEmpty is false, treat as empty string - if (position === 'after') return `${projectContent}${separator}${effectiveGlobalContent}` // Combine based on position + if (position === 'after') { + return `${projectContent}${separator}${effectiveGlobalContent}` + } // Combine based on position return `${effectiveGlobalContent}${separator}${projectContent}` // Default: 'before' } @@ -750,21 +766,17 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out return resolveSubAgentCanonicalName(subAgent) } - protected transformCommandName( - cmd: CommandPrompt, - options?: CommandNameTransformOptions - ): string { + protected transformCommandName(cmd: CommandPrompt, options?: CommandNameTransformOptions): string { const {includeSeriesPrefix = true, seriesSeparator = '-'} = options ?? 
{} - if (!includeSeriesPrefix || cmd.commandPrefix == null) return `${cmd.commandName}.md` // If prefix should not be included or prefix is not present, return just commandName + if (!includeSeriesPrefix || cmd.commandPrefix == null) { + return `${cmd.commandName}.md` + } // If prefix should not be included or prefix is not present, return just commandName return `${cmd.commandPrefix}${seriesSeparator}${cmd.commandName}.md` } - protected transformSubAgentName( - subAgent: SubAgentPrompt, - options?: SubAgentNameTransformOptions - ): string { + protected transformSubAgentName(subAgent: SubAgentPrompt, options?: SubAgentNameTransformOptions): string { const {fileNameSource} = this.subAgentsConfig const includePrefix = options?.includePrefix ?? this.subAgentsConfig.includePrefix const linkSymbol = options?.linkSymbol ?? this.subAgentsConfig.linkSymbol @@ -787,9 +799,13 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out }) let normalized = sanitizedCharacters.join('') - while (normalized.endsWith('.') || normalized.endsWith(' ')) normalized = normalized.slice(0, -1) + while (normalized.endsWith('.') || normalized.endsWith(' ')) { + normalized = normalized.slice(0, -1) + } - if (normalized.length === 0) throw new Error(`Cannot derive a valid output file name from "${value}"`) + if (normalized.length === 0) { + throw new Error(`Cannot derive a valid output file name from "${value}"`) + } return normalized } @@ -798,21 +814,23 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out if (this.warnedDeprecatedSubAgentFileNameSource) return this.warnedDeprecatedSubAgentFileNameSource = true - this.log.warn(buildConfigDiagnostic({ - code: 'SUBAGENT_FRONTMATTER_NAME_SOURCE_DEPRECATED', - title: 'Sub-agent fileNameSource="frontMatterName" now resolves from derived names', - reason: diagnosticLines( - `The ${this.name} plugin no longer reads authored sub-agent front matter names.`, - 'tnmsc now derives sub-agent names 
from the sub-agent path.' - ), - exactFix: diagnosticLines( - 'Remove authored `name` fields from sub-agent sources.', - 'Keep using `fileNameSource="frontMatterName"` only as a temporary alias for the derived-path naming behavior.' - ), - details: { - plugin: this.name - } - })) + this.log.warn( + buildConfigDiagnostic({ + code: 'SUBAGENT_FRONTMATTER_NAME_SOURCE_DEPRECATED', + title: 'Sub-agent fileNameSource="frontMatterName" now resolves from derived names', + reason: diagnosticLines( + `The ${this.name} plugin no longer reads authored sub-agent front matter names.`, + 'tnmsc now derives sub-agent names from the sub-agent path.' + ), + exactFix: diagnosticLines( + 'Remove authored `name` fields from sub-agent sources.', + 'Keep using `fileNameSource="frontMatterName"` only as a temporary alias for the derived-path naming behavior.' + ), + details: { + plugin: this.name + } + }) + ) } protected appendSubAgentDeclarations( @@ -881,7 +899,10 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out declarations.push({ path: path.join(skillDir, childDoc.dir.path.replace(/\.mdx$/, '.md')), scope, - source: {kind: 'skillReference', content: childDoc.content as string} + source: { + kind: 'skillReference', + content: childDoc.content as string + } }) } } @@ -891,19 +912,18 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out declarations.push({ path: path.join(skillDir, resource.relativePath), scope, - source: {kind: 'skillResource', content: resource.content, encoding: resource.encoding} + source: { + kind: 'skillResource', + content: resource.content, + encoding: resource.encoding + } }) } } } } - protected appendRuleDeclarations( - declarations: OutputFileDeclaration[], - basePath: string, - scope: OutputDeclarationScope, - rules: readonly RulePrompt[] - ): void { + protected appendRuleDeclarations(declarations: OutputFileDeclaration[], basePath: string, scope: OutputDeclarationScope, rules: readonly 
RulePrompt[]): void { const rulesDir = path.join(basePath, this.rulesConfig.subDir ?? 'rules') for (const rule of rules) { @@ -915,21 +935,28 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out } } - protected buildSubAgentTomlContent( - agent: SubAgentPrompt, - frontMatter: Record | undefined - ): string { + protected buildSubAgentTomlContent(agent: SubAgentPrompt, frontMatter: Record | undefined): string { const {bodyFieldName} = this.subAgentsConfig - if (bodyFieldName == null || bodyFieldName.length === 0) throw new Error(`subagents.bodyFieldName is required when artifactFormat="toml" for ${this.name}`) + if (bodyFieldName == null || bodyFieldName.length === 0) { + throw new Error(`subagents.bodyFieldName is required when artifactFormat="toml" for ${this.name}`) + } return this.buildTomlContent({ content: agent.content, bodyFieldName, ...frontMatter != null && {frontMatter}, - ...this.subAgentsConfig.fieldNameMap != null && {fieldNameMap: this.subAgentsConfig.fieldNameMap}, - ...this.subAgentsConfig.excludedFrontMatterFields != null && {excludedKeys: this.subAgentsConfig.excludedFrontMatterFields}, - ...this.subAgentsConfig.extraFields != null && {extraFields: this.subAgentsConfig.extraFields}, - ...this.subAgentsConfig.fieldOrder != null && {fieldOrder: this.subAgentsConfig.fieldOrder} + ...this.subAgentsConfig.fieldNameMap != null && { + fieldNameMap: this.subAgentsConfig.fieldNameMap + }, + ...this.subAgentsConfig.excludedFrontMatterFields != null && { + excludedKeys: this.subAgentsConfig.excludedFrontMatterFields + }, + ...this.subAgentsConfig.extraFields != null && { + extraFields: this.subAgentsConfig.extraFields + }, + ...this.subAgentsConfig.fieldOrder != null && { + fieldOrder: this.subAgentsConfig.fieldOrder + } }) } @@ -940,22 +967,23 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out const includeSeriesPrefix = pluginOverride?.includeSeriesPrefix ?? 
globalOptions?.includeSeriesPrefix // Only include properties that have defined values to satisfy exactOptionalPropertyTypes // Plugin-specific overrides take precedence over global settings const seriesSeparator = pluginOverride?.seriesSeparator - if (includeSeriesPrefix != null && seriesSeparator != null) return {includeSeriesPrefix, seriesSeparator} // Build result object conditionally to avoid assigning undefined to readonly properties + if (includeSeriesPrefix != null && seriesSeparator != null) { + return {includeSeriesPrefix, seriesSeparator} + } // Build result object conditionally to avoid assigning undefined to readonly properties if (includeSeriesPrefix != null) return {includeSeriesPrefix} if (seriesSeparator != null) return {seriesSeparator} return {} } - protected getTransformOptionsFromContext( - ctx: OutputWriteContext, - additionalOptions?: CommandNameTransformOptions - ): CommandNameTransformOptions { + protected getTransformOptionsFromContext(ctx: OutputWriteContext, additionalOptions?: CommandNameTransformOptions): CommandNameTransformOptions { const seriesOptions = this.getCommandSeriesOptions(ctx) const includeSeriesPrefix = seriesOptions.includeSeriesPrefix ?? additionalOptions?.includeSeriesPrefix // Only include properties that have defined values to satisfy exactOptionalPropertyTypes // Merge: additionalOptions (plugin defaults) <- seriesOptions (config overrides) const seriesSeparator = seriesOptions.seriesSeparator ?? 
additionalOptions?.seriesSeparator - if (includeSeriesPrefix != null && seriesSeparator != null) return {includeSeriesPrefix, seriesSeparator} // Build result object conditionally to avoid assigning undefined to readonly properties + if (includeSeriesPrefix != null && seriesSeparator != null) { + return {includeSeriesPrefix, seriesSeparator} + } // Build result object conditionally to avoid assigning undefined to readonly properties if (includeSeriesPrefix != null) return {includeSeriesPrefix} if (seriesSeparator != null) return {seriesSeparator} return {} @@ -967,13 +995,7 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out return registeredPlugins.includes(precedingPluginName) } - protected getRegistryWriter< - TEntry, - TRegistry extends RegistryData, - T extends RegistryWriter - >( - WriterClass: new (logger: ILogger) => T - ): T { + protected getRegistryWriter>(WriterClass: new (logger: ILogger) => T): T { const cacheKey = WriterClass.name const cached = this.registryWriterCache.get(cacheKey) // Check cache first @@ -984,10 +1006,7 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out return writer } - protected async registerInRegistry< - TEntry, - TRegistry extends RegistryData - >( + protected async registerInRegistry( writer: RegistryWriter, entries: readonly TEntry[], ctx: OutputWriteContext @@ -1032,7 +1051,10 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out sourceScopes: readonly OutputDeclarationScope[], resolveScope: (item: T) => OutputDeclarationScope, requestedScopes?: OutputScopeSelection - ): {readonly selectedScope?: OutputDeclarationScope, readonly items: readonly T[]} { + ): { + readonly selectedScope?: OutputDeclarationScope + readonly items: readonly T[] + } { if (items.length === 0) return {items: []} const availableScopes = [...new Set(items.map(resolveScope))] @@ -1052,10 +1074,7 @@ export abstract class AbstractOutputPlugin extends 
AbstractPlugin implements Out } } - protected selectRuleScopes( - ctx: OutputWriteContext, - rules: readonly RulePrompt[] - ): readonly OutputDeclarationScope[] { + protected selectRuleScopes(ctx: OutputWriteContext, rules: readonly RulePrompt[]): readonly OutputDeclarationScope[] { const availableScopes = [...new Set(rules.map(rule => this.normalizeSourceScope(this.normalizeRuleScope(rule))))] return resolveTopicScopes({ requestedScopes: this.getTopicScopeOverride(ctx, 'rules'), @@ -1079,27 +1098,24 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out }) } - protected getTopicScopeOverride( - ctx: OutputPluginContext | OutputWriteContext, - topic: OutputScopeTopic - ): OutputScopeSelection | undefined { + protected getTopicScopeOverride(ctx: OutputPluginContext | OutputWriteContext, topic: OutputScopeTopic): OutputScopeSelection | undefined { return ctx.pluginOptions?.outputScopes?.plugins?.[this.name]?.[topic] } - protected buildSkillFrontMatter( - skill: SkillPrompt, - options?: SkillFrontMatterOptions - ): Record { + protected buildSkillFrontMatter(skill: SkillPrompt, options?: SkillFrontMatterOptions): Record { const fm = skill.yamlFrontMatter const result: Record = { name: this.getSkillName(skill), description: fm.description } - if ('displayName' in fm && fm.displayName != null) { // Conditionally add optional fields + if ('displayName' in fm && fm.displayName != null) { + // Conditionally add optional fields result['displayName'] = fm.displayName } - if ('keywords' in fm && fm.keywords != null && fm.keywords.length > 0) result['keywords'] = fm.keywords + if ('keywords' in fm && fm.keywords != null && fm.keywords.length > 0) { + result['keywords'] = fm.keywords + } if ('author' in fm && fm.author != null) result['author'] = fm.author if ('version' in fm && fm.version != null) result['version'] = fm.version @@ -1109,7 +1125,8 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out result['allowTools'] 
= toolFormat === 'string' ? fm.allowTools.join(',') : fm.allowTools } - if (options?.additionalFields != null) { // Add any additional custom fields + if (options?.additionalFields != null) { + // Add any additional custom fields Object.assign(result, options.additionalFields) } @@ -1117,13 +1134,9 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out } protected buildRuleContent(rule: RulePrompt, ctx?: OutputPluginContext): string { - const fmData = this.rulesConfig.transformFrontMatter - ? this.rulesConfig.transformFrontMatter(rule) - : {globs: rule.globs.join(', ')} + const fmData = this.rulesConfig.transformFrontMatter ? this.rulesConfig.transformFrontMatter(rule) : {globs: rule.globs.join(', ')} - const sanitizedFmData = fmData == null || Object.keys(fmData).length === 0 - ? void 0 - : fmData + const sanitizedFmData = fmData == null || Object.keys(fmData).length === 0 ? void 0 : fmData return this.buildMarkdownContent(rule.content, sanitizedFmData, ctx) } @@ -1131,15 +1144,6 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out protected buildRuleFileName(rule: RulePrompt): string { const prefix = `${this.rulesConfig.prefix ?? 'rule'}${this.rulesConfig.linkSymbol ?? '-'}` const fileName = `${prefix}${rule.prefix}${this.rulesConfig.linkSymbol ?? '-'}${rule.ruleName}${this.rulesConfig.ext ?? '.md'}` - this.log.trace('buildRuleFileName', { - plugin: this.name, - rulePrefix: rule.prefix, - ruleName: rule.ruleName, - prefix: this.rulesConfig.prefix ?? 'rule', - linkSymbol: this.rulesConfig.linkSymbol ?? '-', - ext: this.rulesConfig.ext ?? 
'.md', - result: fileName - }) return fileName } @@ -1152,7 +1156,9 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out const cleanupProtect = this.buildCleanupTargetsFromScopeConfig(this.cleanupConfig.protect, 'protect', ctx) const {excludeScanGlobs} = this.cleanupConfig - if (cleanupDelete.length === 0 && cleanupProtect.length === 0 && (excludeScanGlobs == null || excludeScanGlobs.length === 0)) return {} + if (cleanupDelete.length === 0 && cleanupProtect.length === 0 && (excludeScanGlobs == null || excludeScanGlobs.length === 0)) { + return {} + } return { ...cleanupDelete.length > 0 && {delete: cleanupDelete}, @@ -1166,10 +1172,7 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out return this.wslMirrorPaths.map(sourcePath => ({sourcePath})) } - async convertContent( - declaration: OutputFileDeclaration, - ctx: OutputWriteContext - ): Promise { + async convertContent(declaration: OutputFileDeclaration, ctx: OutputWriteContext): Promise { const source = declaration.source as DeclarativeOutputSource switch (source.kind) { @@ -1177,66 +1180,62 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out case 'projectChildMemory': case 'globalMemory': case 'skillReference': - case 'ignoreFile': return source.content - case 'command': return this.buildCommandContent(source.command, ctx) - case 'subAgent': return this.buildSubAgentContent(source.subAgent, ctx) - case 'skillMain': return this.buildSkillMainContent(source.skill, ctx) - case 'skillResource': return source.encoding === 'base64' ? 
Buffer.from(source.content, 'base64') : source.content - case 'rule': return this.buildRuleContent(source.rule, ctx) - default: throw new Error(`Unsupported declaration source for plugin ${this.name}`) + case 'ignoreFile': + return source.content + case 'command': + return this.buildCommandContent(source.command, ctx) + case 'subAgent': + return this.buildSubAgentContent(source.subAgent, ctx) + case 'skillMain': + return this.buildSkillMainContent(source.skill, ctx) + case 'skillResource': + return source.encoding === 'base64' ? Buffer.from(source.content, 'base64') : source.content + case 'rule': + return this.buildRuleContent(source.rule, ctx) + default: + throw new Error(`Unsupported declaration source for plugin ${this.name}`) } } protected async buildDefaultOutputDeclarations(ctx: OutputWriteContext): Promise { const declarations: OutputFileDeclaration[] = [] - const { - globalMemory, - commands, - subAgents, - skills, - rules, - aiAgentIgnoreConfigFiles - } = ctx.collectedOutputContext + const {globalMemory, commands, subAgents, skills, rules, aiAgentIgnoreConfigFiles} = ctx.collectedOutputContext const transformOptions = this.getTransformOptionsFromContext(ctx) const ignoreOutputPath = this.getIgnoreOutputPath() - const ignoreFile = this.indexignore == null - ? void 0 - : aiAgentIgnoreConfigFiles?.find(file => file.fileName === this.indexignore) - const selectedCommands = this.commandOutputEnabled && commands != null - ? this.selectSingleScopeItems( - commands, - this.commandsConfig.sourceScopes, - cmd => this.resolveCommandSourceScope(cmd), - this.getTopicScopeOverride(ctx, 'commands') - ) - : {items: [] as readonly CommandPrompt[]} - - const selectedSubAgents = this.subAgentOutputEnabled && subAgents != null - ? 
this.selectSingleScopeItems( - subAgents, - this.subAgentsConfig.sourceScopes, - subAgent => this.resolveSubAgentSourceScope(subAgent), - this.getTopicScopeOverride(ctx, 'subagents') - ) - : {items: [] as readonly SubAgentPrompt[]} - - const selectedSkills = this.skillOutputEnabled && skills != null - ? this.selectSingleScopeItems( - skills, - this.skillsConfig.sourceScopes, - skill => this.resolveSkillSourceScope(skill), - this.getTopicScopeOverride(ctx, 'skills') - ) - : {items: [] as readonly SkillPrompt[]} + const ignoreFile = this.indexignore == null ? void 0 : aiAgentIgnoreConfigFiles?.find(file => file.fileName === this.indexignore) + const selectedCommands + = this.commandOutputEnabled && commands != null + ? this.selectSingleScopeItems( + commands, + this.commandsConfig.sourceScopes, + cmd => this.resolveCommandSourceScope(cmd), + this.getTopicScopeOverride(ctx, 'commands') + ) + : {items: [] as readonly CommandPrompt[]} + + const selectedSubAgents + = this.subAgentOutputEnabled && subAgents != null + ? this.selectSingleScopeItems( + subAgents, + this.subAgentsConfig.sourceScopes, + subAgent => this.resolveSubAgentSourceScope(subAgent), + this.getTopicScopeOverride(ctx, 'subagents') + ) + : {items: [] as readonly SubAgentPrompt[]} + + const selectedSkills + = this.skillOutputEnabled && skills != null + ? this.selectSingleScopeItems( + skills, + this.skillsConfig.sourceScopes, + skill => this.resolveSkillSourceScope(skill), + this.getTopicScopeOverride(ctx, 'skills') + ) + : {items: [] as readonly SkillPrompt[]} const allRules = rules ?? [] - const activeRuleScopes = this.ruleOutputEnabled && allRules.length > 0 - ? new Set(this.selectRuleScopes(ctx, allRules)) - : new Set() - const activePromptScopes = new Set(this.selectPromptScopes( - ctx, - this.outputCapabilities.prompt?.scopes ?? ['project', 'global'] - )) + const activeRuleScopes = this.ruleOutputEnabled && allRules.length > 0 ? 
new Set(this.selectRuleScopes(ctx, allRules)) : new Set() + const activePromptScopes = new Set(this.selectPromptScopes(ctx, this.outputCapabilities.prompt?.scopes ?? ['project', 'global'])) const rulesByScope: Record = { project: [], @@ -1252,16 +1251,15 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out const basePath = this.resolveProjectConfigDir(ctx, project) if (projectRootDir == null || basePath == null) continue - if ( - this.outputFileName.length > 0 - && activePromptScopes.has('project') - && this.isProjectPromptOutputTarget(project) - ) { + if (this.outputFileName.length > 0 && activePromptScopes.has('project') && this.isProjectPromptOutputTarget(project)) { if (project.rootMemoryPrompt != null) { declarations.push({ path: path.join(projectRootDir, this.outputFileName), scope: 'project', - source: {kind: 'projectRootMemory', content: project.rootMemoryPrompt.content as string} + source: { + kind: 'projectRootMemory', + content: project.rootMemoryPrompt.content as string + } }) } @@ -1270,7 +1268,10 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out declarations.push({ path: this.resolveFullPath(child.dir), scope: 'project', - source: {kind: 'projectChildMemory', content: child.content as string} + source: { + kind: 'projectChildMemory', + content: child.content as string + } }) } } @@ -1294,10 +1295,7 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out } if (activeRuleScopes.has('project')) { - const projectRules = applySubSeriesGlobPrefix( - filterByProjectConfig(rulesByScope.project, projectConfig, 'rules'), - projectConfig - ) + const projectRules = applySubSeriesGlobPrefix(filterByProjectConfig(rulesByScope.project, projectConfig, 'rules'), projectConfig) this.appendRuleDeclarations(declarations, basePath, 'project', projectRules) } @@ -1346,15 +1344,14 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out 
this.appendRuleDeclarations(declarations, basePath, ruleScope, filteredRules) } - if ( - globalMemory != null - && this.outputFileName.length > 0 - && activePromptScopes.has('global') - ) { + if (globalMemory != null && this.outputFileName.length > 0 && activePromptScopes.has('global')) { declarations.push({ path: path.join(this.getGlobalConfigDir(), this.outputFileName), scope: 'global', - source: {kind: 'globalMemory', content: globalMemory.content as string} + source: { + kind: 'globalMemory', + content: globalMemory.content as string + } }) } @@ -1372,7 +1369,9 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out toolPreset: this.toolPreset, hasRawContent: true }) - const scopeCollector = new GlobalScopeCollector({toolPreset: this.toolPreset}) + const scopeCollector = new GlobalScopeCollector({ + toolPreset: this.toolPreset + }) const globalScope = scopeCollector.collect() const result = await compileRawPromptArtifact({ filePath: cmd.dir.getAbsolutePath(), @@ -1385,11 +1384,15 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out } const commandFrontMatterTransformer = this.commandsConfig.transformFrontMatter - if (commandFrontMatterTransformer == null) throw new Error(`commands.transformFrontMatter is required for command output plugin: ${this.name}`) + if (commandFrontMatterTransformer == null) { + throw new Error(`commands.transformFrontMatter is required for command output plugin: ${this.name}`) + } const transformedFrontMatter = commandFrontMatterTransformer(cmd, { isRecompiled: useRecompiledFrontMatter, - ...compiledFrontMatter != null && {sourceFrontMatter: compiledFrontMatter as Record} + ...compiledFrontMatter != null && { + sourceFrontMatter: compiledFrontMatter as Record + } }) return this.buildMarkdownContent(compiledContent, transformedFrontMatter, ctx) @@ -1398,7 +1401,9 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out protected 
buildSubAgentContent(agent: SubAgentPrompt, ctx?: OutputPluginContext): string { const subAgentFrontMatterTransformer = this.subAgentsConfig.transformFrontMatter const transformedFrontMatter = subAgentFrontMatterTransformer?.(agent, { - ...agent.yamlFrontMatter != null && {sourceFrontMatter: agent.yamlFrontMatter as Record} + ...agent.yamlFrontMatter != null && { + sourceFrontMatter: agent.yamlFrontMatter as Record + } }) if (this.subAgentsConfig.artifactFormat === 'toml') { @@ -1406,22 +1411,14 @@ export abstract class AbstractOutputPlugin extends AbstractPlugin implements Out return this.buildSubAgentTomlContent(agent, sourceFrontMatter) } - if (transformedFrontMatter != null) return this.buildMarkdownContent(agent.content, transformedFrontMatter, ctx) + if (transformedFrontMatter != null) { + return this.buildMarkdownContent(agent.content, transformedFrontMatter, ctx) + } - return this.buildMarkdownContentWithRaw( - agent.content, - agent.yamlFrontMatter, - agent.rawFrontMatter, - ctx - ) + return this.buildMarkdownContentWithRaw(agent.content, agent.yamlFrontMatter, agent.rawFrontMatter, ctx) } protected buildSkillMainContent(skill: SkillPrompt, ctx?: OutputPluginContext): string { - return this.buildMarkdownContentWithRaw( - skill.content as string, - skill.yamlFrontMatter, - skill.rawFrontMatter, - ctx - ) + return this.buildMarkdownContentWithRaw(skill.content as string, skill.yamlFrontMatter, skill.rawFrontMatter, ctx) } } diff --git a/cli/src/plugins/plugin-core/McpConfigManager.ts b/cli/src/plugins/plugin-core/McpConfigManager.ts index 83943308..6ed7e68c 100644 --- a/cli/src/plugins/plugin-core/McpConfigManager.ts +++ b/cli/src/plugins/plugin-core/McpConfigManager.ts @@ -39,28 +39,37 @@ export interface McpWriteResult { */ export type McpConfigTransformer = (config: McpServerConfig) => Record -export function collectMcpServersFromSkills( - skills: readonly SkillPrompt[], - logger?: ILogger -): Map { +export function collectMcpServersFromSkills(skills: 
readonly SkillPrompt[], logger?: ILogger): Map { const merged = new Map() + const serverCountsBySkill = new Map() for (const skill of skills) { if (skill.mcpConfig == null) continue + const skillName = resolveSkillName(skill) + let count = 0 for (const [name, config] of Object.entries(skill.mcpConfig.mcpServers)) { merged.set(name, config) - logger?.debug('mcp server collected', {skill: resolveSkillName(skill), mcpName: name}) + count++ + } + if (count > 0) { + serverCountsBySkill.set(skillName, count) } } + // Emit aggregated summary log instead of per-item logs + if (serverCountsBySkill.size > 0 && logger == null) return merged + + const totalServers = [...serverCountsBySkill.values()].reduce((a, b) => a + b, 0) + logger?.debug('mcp servers collected', { + totalSkills: serverCountsBySkill.size, + totalServers, + bySkill: Object.fromEntries(serverCountsBySkill) + }) return merged } -export function transformMcpServerMap( - servers: Map, - transformer: McpConfigTransformer -): TransformedMcpConfig { +export function transformMcpServerMap(servers: Map, transformer: McpConfigTransformer): TransformedMcpConfig { const result: TransformedMcpConfig = {} for (const [name, config] of servers) result[name] = transformer(config) @@ -85,10 +94,7 @@ export class McpConfigManager { return collectMcpServersFromSkills(skills, this.logger) } - transformMcpServers( - servers: Map, - transformer: McpConfigTransformer - ): TransformedMcpConfig { + transformMcpServers(servers: Map, transformer: McpConfigTransformer): TransformedMcpConfig { return transformMcpServerMap(servers, transformer) } @@ -98,28 +104,25 @@ export class McpConfigManager { const content = this.fs.readFileSync(configPath, 'utf8') return JSON.parse(content) as Record } - } - catch (error) { - this.logger.warn(buildFileOperationDiagnostic({ - code: 'MCP_CONFIG_READ_FAILED', - title: 'Failed to read existing MCP config', - operation: 'read', - targetKind: 'MCP config file', - path: configPath, - error, - details: { - 
fallback: 'starting fresh' - } - })) + } catch (error) { + this.logger.warn( + buildFileOperationDiagnostic({ + code: 'MCP_CONFIG_READ_FAILED', + title: 'Failed to read existing MCP config', + operation: 'read', + targetKind: 'MCP config file', + path: configPath, + error, + details: { + fallback: 'starting fresh' + } + }) + ) } return {} } - writeCursorMcpConfig( - configPath: string, - servers: TransformedMcpConfig, - dryRun: boolean - ): McpWriteResult { + writeCursorMcpConfig(configPath: string, servers: TransformedMcpConfig, dryRun: boolean): McpWriteResult { const existingConfig = this.readExistingConfig(configPath) const existingMcpServers = (existingConfig['mcpServers'] as Record) ?? {} @@ -129,15 +132,11 @@ export class McpConfigManager { return this.writeConfigFile(configPath, content, Object.keys(servers).length, dryRun) } - writeOpencodeMcpConfig( - configPath: string, - servers: TransformedMcpConfig, - dryRun: boolean, - additionalConfig?: Record - ): McpWriteResult { + writeOpencodeMcpConfig(configPath: string, servers: TransformedMcpConfig, dryRun: boolean, additionalConfig?: Record): McpWriteResult { const existingConfig = this.readExistingConfig(configPath) - const mergedConfig = { // Merge with additional config (like $schema, plugin array) + const mergedConfig = { + // Merge with additional config (like $schema, plugin array) ...existingConfig, ...additionalConfig, mcp: servers @@ -147,11 +146,7 @@ export class McpConfigManager { return this.writeConfigFile(configPath, content, Object.keys(servers).length, dryRun) } - writeSkillMcpConfig( - configPath: string, - rawContent: string, - dryRun: boolean - ): McpWriteResult { + writeSkillMcpConfig(configPath: string, rawContent: string, dryRun: boolean): McpWriteResult { return this.writeConfigFile(configPath, rawContent, 1, dryRun) } @@ -159,34 +154,45 @@ export class McpConfigManager { if (!this.fs.existsSync(dir)) this.fs.mkdirSync(dir, {recursive: true}) } - private writeConfigFile( - configPath: 
string, - content: string, - serverCount: number, - dryRun: boolean - ): McpWriteResult { + private writeConfigFile(configPath: string, content: string, serverCount: number, dryRun: boolean): McpWriteResult { if (dryRun) { - this.logger.trace({action: 'dryRun', type: 'mcpConfig', path: configPath, serverCount}) + this.logger.trace({ + action: 'dryRun', + type: 'mcpConfig', + path: configPath, + serverCount + }) return {success: true, path: configPath, serverCount, skipped: true} } try { this.ensureDirectory(path.dirname(configPath)) this.fs.writeFileSync(configPath, content) - this.logger.trace({action: 'write', type: 'mcpConfig', path: configPath, serverCount}) + this.logger.trace({ + action: 'write', + type: 'mcpConfig', + path: configPath, + serverCount + }) return {success: true, path: configPath, serverCount} - } - catch (error) { + } catch (error) { const errMsg = error instanceof Error ? error.message : String(error) - this.logger.error(buildFileOperationDiagnostic({ - code: 'MCP_CONFIG_WRITE_FAILED', - title: 'Failed to write MCP config', - operation: 'write', - targetKind: 'MCP config file', + this.logger.error( + buildFileOperationDiagnostic({ + code: 'MCP_CONFIG_WRITE_FAILED', + title: 'Failed to write MCP config', + operation: 'write', + targetKind: 'MCP config file', + path: configPath, + error: errMsg + }) + ) + return { + success: false, path: configPath, - error: errMsg - })) - return {success: false, path: configPath, serverCount: 0, error: error as Error} + serverCount: 0, + error: error as Error + } } } } @@ -234,7 +240,9 @@ export function transformMcpConfigForOpencode(config: McpServerConfig): Record if (configRecord['url'] != null) result['url'] = configRecord['url'] - else if (configRecord['serverUrl'] != null) result['url'] = configRecord['serverUrl'] + else if (configRecord['serverUrl'] != null) { + result['url'] = configRecord['serverUrl'] + } } result['enabled'] = config.disabled !== true diff --git 
a/cli/src/plugins/plugin-core/constants.ts b/cli/src/plugins/plugin-core/constants.ts index ab8d4b24..63078971 100644 --- a/cli/src/plugins/plugin-core/constants.ts +++ b/cli/src/plugins/plugin-core/constants.ts @@ -81,9 +81,7 @@ export const SourcePromptExtensions = { PRIMARY: '.src.mdx' } as const -export const SourcePromptFileExtensions = [ - SourcePromptExtensions.PRIMARY -] as const +export const SourcePromptFileExtensions = [SourcePromptExtensions.PRIMARY] as const export const SourceLocaleExtensions = { zh: SourcePromptFileExtensions, @@ -91,8 +89,7 @@ export const SourceLocaleExtensions = { } as const export function hasSourcePromptExtension(fileName: string): boolean { - return SourcePromptFileExtensions.some(extension => - fileName.endsWith(extension)) + return SourcePromptFileExtensions.some(extension => fileName.endsWith(extension)) } export const GlobalConfigDirs = { @@ -108,13 +105,7 @@ export const IgnoreFiles = { } as const export const PreservedSkills = { - CURSOR: new Set([ - 'create-rule', - 'create-skill', - 'create-subagent', - 'migrate-to-skills', - 'update-cursor-settings' - ]) + CURSOR: new Set(['create-rule', 'create-skill', 'create-subagent', 'migrate-to-skills', 'update-cursor-settings']) } as const export const ToolPresets = { diff --git a/doc/content/cli/install.mdx b/doc/content/cli/install.mdx index d89c0b3f..dabb8bdf 100644 --- a/doc/content/cli/install.mdx +++ b/doc/content/cli/install.mdx @@ -18,7 +18,7 @@ keywords: - 根工作区要求 `Node.js >= 22` - 根工作区开发引擎要求 `pnpm 10.30.1` -- Rust workspace 目标基线为 `rust >= 1.87.0` +- Rust workspace 目标基线为 `rust >= 1.88.0` - `gui/` 的开发引擎要求更高,当前声明为 `rust >= 1.93.1` 与 `node >= 25.2.1` 如果你只使用文档站与 CLI,先满足根工作区要求即可;如果你还要构建桌面 GUI,再对齐 `gui/` 的更高版本。 diff --git a/doc/content/cli/plugin-config.mdx b/doc/content/cli/plugin-config.mdx index afa295a7..707a9291 100644 --- a/doc/content/cli/plugin-config.mdx +++ b/doc/content/cli/plugin-config.mdx @@ -27,7 +27,7 @@ status: stable - `JetBrainsAIAssistantCodexOutputPlugin` 
- `DroidCLIOutputPlugin` - `GeminiCLIOutputPlugin` -- `GenericSkillsOutputPlugin` +- `GenericSkillsOutputPlugin`(deprecated,保留用于兼容旧技能分发;清理时需要连同全局 `~/.skills/` 整个目录一起移除) - `OpencodeCLIOutputPlugin` - `QoderIDEPluginOutputPlugin` - `TraeIDEOutputPlugin` diff --git a/doc/content/technical-details/documentation-components.mdx b/doc/content/technical-details/documentation-components.mdx index d0a2aab0..ac206868 100644 --- a/doc/content/technical-details/documentation-components.mdx +++ b/doc/content/technical-details/documentation-components.mdx @@ -136,7 +136,7 @@ status: stable stack: 'Rust workspace', task: '执行测试', command: 'cargo test --workspace', - runtime: 'Rust 1.87+', + runtime: 'Rust 1.88+', notes: '适合核对 Rust-first 侧是否被改动影响。' } ]} diff --git a/doc/package.json b/doc/package.json index 52ce54c3..674d25fe 100644 --- a/doc/package.json +++ b/doc/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-docs", - "version": "2026.10330.108", + "version": "2026.10330.118", "private": true, "description": "Chinese-first manifesto-led documentation site for @truenine/memory-sync.", "engines": { diff --git a/gui/package.json b/gui/package.json index 853f6d13..02df2960 100644 --- a/gui/package.json +++ b/gui/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-gui", - "version": "2026.10330.108", + "version": "2026.10330.118", "private": true, "engines": { "node": ">=25.2.1", diff --git a/gui/src-tauri/Cargo.toml b/gui/src-tauri/Cargo.toml index 47750281..aabea011 100644 --- a/gui/src-tauri/Cargo.toml +++ b/gui/src-tauri/Cargo.toml @@ -1,9 +1,10 @@ [package] name = "memory-sync-gui" -version = "2026.10330.108" +version = "2026.10330.118" description = "Memory Sync desktop GUI application" authors.workspace = true edition.workspace = true +rust-version = "1.93.1" license.workspace = true repository.workspace = true diff --git a/gui/src-tauri/tauri.conf.json b/gui/src-tauri/tauri.conf.json index d4a37ce9..6b23044d 100644 --- a/gui/src-tauri/tauri.conf.json 
+++ b/gui/src-tauri/tauri.conf.json @@ -1,6 +1,6 @@ { "$schema": "https://schema.tauri.app/config/2", - "version": "2026.10330.108", + "version": "2026.10330.118", "productName": "Memory Sync", "identifier": "org.truenine.memory-sync", "build": { diff --git a/gui/src-tauri/tests/sidecar_removed_test.rs b/gui/src-tauri/tests/sidecar_removed_test.rs index 9d113c68..2ae6733c 100644 --- a/gui/src-tauri/tests/sidecar_removed_test.rs +++ b/gui/src-tauri/tests/sidecar_removed_test.rs @@ -5,7 +5,6 @@ /// /// These tests read the source file at compile time (via `include_str!`) and /// assert that the removed function definitions are no longer present. - const COMMANDS_SRC: &str = include_str!("../src/commands.rs"); /// Helper: assert a `fn ` definition is absent from the source. diff --git a/libraries/logger/Cargo.toml b/libraries/logger/Cargo.toml index 2943d524..b469b8f6 100644 --- a/libraries/logger/Cargo.toml +++ b/libraries/logger/Cargo.toml @@ -3,6 +3,7 @@ name = "tnmsc-logger" description = "Structured JSON logger with ANSI color support for tnmsc" version.workspace = true edition.workspace = true +rust-version.workspace = true license.workspace = true authors.workspace = true repository.workspace = true diff --git a/libraries/logger/package.json b/libraries/logger/package.json index 6bcdc6a4..46aa8127 100644 --- a/libraries/logger/package.json +++ b/libraries/logger/package.json @@ -1,7 +1,7 @@ { "name": "@truenine/logger", "type": "module", - "version": "2026.10330.108", + "version": "2026.10330.118", "private": true, "description": "Rust-powered structured logger for Node.js via N-API", "license": "AGPL-3.0-only", diff --git a/libraries/logger/src/lib.rs b/libraries/logger/src/lib.rs index 6b8c1eb4..b011826e 100644 --- a/libraries/logger/src/lib.rs +++ b/libraries/logger/src/lib.rs @@ -195,10 +195,10 @@ fn resolve_log_level(explicit: Option) -> LogLevel { if let Some(l) = get_global_log_level() { return l; } - if let Ok(env_val) = std::env::var("LOG_LEVEL") { - 
if let Some(l) = LogLevel::from_str_loose(&env_val) { - return l; - } + if let Ok(env_val) = std::env::var("LOG_LEVEL") + && let Some(l) = LogLevel::from_str_loose(&env_val) + { + return l; } LogLevel::Info } @@ -554,32 +554,32 @@ fn build_copy_text(record: &LoggerDiagnosticRecord) -> Vec { append_section(&mut lines, "Exact Fix", exact_fix, None); } - if let Some(possible_fixes) = &record.possible_fixes { - if !possible_fixes.is_empty() { - if !lines.is_empty() { - lines.push(String::new()); + if let Some(possible_fixes) = &record.possible_fixes + && !possible_fixes.is_empty() + { + if !lines.is_empty() { + lines.push(String::new()); + } + lines.push("Possible Fixes".to_string()); + for (index, fix) in possible_fixes.iter().enumerate() { + let mut iter = fix.iter(); + if let Some(first) = iter.next() { + lines.push(format!("{}. {}", index + 1, first)); } - lines.push("Possible Fixes".to_string()); - for (index, fix) in possible_fixes.iter().enumerate() { - let mut iter = fix.iter(); - if let Some(first) = iter.next() { - lines.push(format!("{}. 
{}", index + 1, first)); - } - for entry in iter { - lines.push(format!(" {entry}")); - } + for entry in iter { + lines.push(format!(" {entry}")); } } } - if let Some(details) = &record.details { - if !details.is_empty() { - if !lines.is_empty() { - lines.push(String::new()); - } - lines.push("Context".to_string()); - lines.extend(value_to_copy_text_lines(&Value::Object(details.clone()))); + if let Some(details) = &record.details + && !details.is_empty() + { + if !lines.is_empty() { + lines.push(String::new()); } + lines.push("Context".to_string()); + lines.extend(value_to_copy_text_lines(&Value::Object(details.clone()))); } lines diff --git a/libraries/md-compiler/Cargo.toml b/libraries/md-compiler/Cargo.toml index 9233b04c..562a5ac7 100644 --- a/libraries/md-compiler/Cargo.toml +++ b/libraries/md-compiler/Cargo.toml @@ -3,6 +3,7 @@ name = "tnmsc-md-compiler" description = "MDX to Markdown compiler with expression evaluation and JSX component processing" version.workspace = true edition.workspace = true +rust-version.workspace = true license.workspace = true authors.workspace = true repository.workspace = true diff --git a/libraries/md-compiler/package.json b/libraries/md-compiler/package.json index 29a1583a..2d507e3e 100644 --- a/libraries/md-compiler/package.json +++ b/libraries/md-compiler/package.json @@ -1,7 +1,7 @@ { "name": "@truenine/md-compiler", "type": "module", - "version": "2026.10330.108", + "version": "2026.10330.118", "private": true, "description": "Rust-powered MDX→Markdown compiler for Node.js with pure-TS fallback", "license": "AGPL-3.0-only", diff --git a/libraries/md-compiler/src/expression_eval.rs b/libraries/md-compiler/src/expression_eval.rs index 5054d4bb..38e92ef0 100644 --- a/libraries/md-compiler/src/expression_eval.rs +++ b/libraries/md-compiler/src/expression_eval.rs @@ -127,10 +127,10 @@ fn evaluate_simple_reference(reference: &str, scope: &EvaluationScope) -> Result /// Try to parse a string literal ("..." or '...'). 
fn try_parse_string_literal(s: &str) -> Option { - if (s.starts_with('"') && s.ends_with('"')) || (s.starts_with('\'') && s.ends_with('\'')) { - if s.len() >= 2 { - return Some(s[1..s.len() - 1].to_string()); - } + if ((s.starts_with('"') && s.ends_with('"')) || (s.starts_with('\'') && s.ends_with('\''))) + && s.len() >= 2 + { + return Some(s[1..s.len() - 1].to_string()); } None } diff --git a/libraries/md-compiler/src/mdx_to_md.rs b/libraries/md-compiler/src/mdx_to_md.rs index dabdfeef..0275b9a6 100644 --- a/libraries/md-compiler/src/mdx_to_md.rs +++ b/libraries/md-compiler/src/mdx_to_md.rs @@ -122,12 +122,10 @@ fn merge_scopes( fn extract_yaml_frontmatter(ast: &markdown::mdast::Node) -> Option> { if let markdown::mdast::Node::Root(root) = ast { for child in &root.children { - if let markdown::mdast::Node::Yaml(yaml) = child { - if let Ok(parsed) = serde_yml::from_str::(&yaml.value) { - if let Value::Object(map) = parsed { - return Some(map.into_iter().collect()); - } - } + if let markdown::mdast::Node::Yaml(yaml) = child + && let Ok(Value::Object(map)) = serde_yml::from_str::(&yaml.value) + { + return Some(map.into_iter().collect()); } } } @@ -147,13 +145,13 @@ fn extract_exports_from_source(source: &str) -> HashMap { } // Try to parse: export const NAME = VALUE - if let Some(rest) = trimmed.strip_prefix("export const ") { - if let Some(eq_pos) = rest.find('=') { - let name = rest[..eq_pos].trim(); - let value_str = rest[eq_pos + 1..].trim(); - if let Ok(val) = serde_json::from_str::(value_str) { - exports.insert(name.to_string(), val); - } + if let Some(rest) = trimmed.strip_prefix("export const ") + && let Some(eq_pos) = rest.find('=') + { + let name = rest[..eq_pos].trim(); + let value_str = rest[eq_pos + 1..].trim(); + if let Ok(val) = serde_json::from_str::(value_str) { + exports.insert(name.to_string(), val); } } } diff --git a/libraries/md-compiler/src/transformer.rs b/libraries/md-compiler/src/transformer.rs index 897770a8..f31b5b30 100644 --- 
a/libraries/md-compiler/src/transformer.rs +++ b/libraries/md-compiler/src/transformer.rs @@ -78,19 +78,19 @@ fn register_built_in_components(ctx: &mut ProcessingContext) { /// Evaluate the `when` attribute of a JSX element. fn evaluate_when_condition(element: &MdxJsxFlowElement, ctx: &ProcessingContext) -> bool { for attr in &element.attributes { - if let AttributeContent::Property(prop) = attr { - if prop.name == "when" { - return match &prop.value { - Some(AttributeValue::Literal(s)) => s == "true", - Some(AttributeValue::Expression(expr)) => { - match evaluate_expression(&expr.value, &ctx.scope) { - Ok(v) => is_truthy(&v), - Err(_) => false, - } + if let AttributeContent::Property(prop) = attr + && prop.name == "when" + { + return match &prop.value { + Some(AttributeValue::Literal(s)) => s == "true", + Some(AttributeValue::Expression(expr)) => { + match evaluate_expression(&expr.value, &ctx.scope) { + Ok(v) => is_truthy(&v), + Err(_) => false, } - None => false, - }; - } + } + None => false, + }; } } true // No `when` attribute = always true @@ -99,19 +99,19 @@ fn evaluate_when_condition(element: &MdxJsxFlowElement, ctx: &ProcessingContext) /// Check the `when` condition for text elements too. 
fn evaluate_when_condition_text(element: &MdxJsxTextElement, ctx: &ProcessingContext) -> bool { for attr in &element.attributes { - if let AttributeContent::Property(prop) = attr { - if prop.name == "when" { - return match &prop.value { - Some(AttributeValue::Literal(s)) => s == "true", - Some(AttributeValue::Expression(expr)) => { - match evaluate_expression(&expr.value, &ctx.scope) { - Ok(v) => is_truthy(&v), - Err(_) => false, - } + if let AttributeContent::Property(prop) = attr + && prop.name == "when" + { + return match &prop.value { + Some(AttributeValue::Literal(s)) => s == "true", + Some(AttributeValue::Expression(expr)) => { + match evaluate_expression(&expr.value, &ctx.scope) { + Ok(v) => is_truthy(&v), + Err(_) => false, } - None => false, - }; - } + } + None => false, + }; } } true @@ -284,10 +284,10 @@ fn parse_expression_literal_value(expression: &str) -> Option { return Some(Value::Number(number.into())); } - if let Ok(number) = expression.parse::() { - if let Some(number) = Number::from_f64(number) { - return Some(Value::Number(number)); - } + if let Ok(number) = expression.parse::() + && let Some(number) = Number::from_f64(number) + { + return Some(Value::Number(number)); } None @@ -314,7 +314,7 @@ fn evaluate_attribute_expression_value(expression: &str, scope: &EvaluationScope serde_json::from_str::(&rendered) .ok() - .or_else(|| Some(Value::String(rendered))) + .or(Some(Value::String(rendered))) } fn stringify_html_attribute(name: &str, value: &Value) -> Option { @@ -465,15 +465,17 @@ fn render_source_aware_node(node: &Node, ctx: &ProcessingContext) -> String { _ => { let source_slice = get_source_slice(node.position(), ctx.source_text.as_deref()); let Some(children) = node.children() else { - return source_slice.unwrap_or_else(|| serialize_generated_nodes(&[node.clone()])); + return source_slice + .unwrap_or_else(|| serialize_generated_nodes(std::slice::from_ref(node))); }; if children.is_empty() { - return source_slice.unwrap_or_else(|| 
serialize_generated_nodes(&[node.clone()])); + return source_slice + .unwrap_or_else(|| serialize_generated_nodes(std::slice::from_ref(node))); } let Some(source_slice) = source_slice else { - return serialize_generated_nodes(&[node.clone()]); + return serialize_generated_nodes(std::slice::from_ref(node)); }; let Some(start_offset) = node.position().map(|position| position.start.offset) else { return source_slice; @@ -615,16 +617,16 @@ fn get_attribute_value( scope: &EvaluationScope, ) -> Option { for attr in attrs { - if let AttributeContent::Property(prop) = attr { - if prop.name == name { - return match &prop.value { - Some(AttributeValue::Literal(s)) => Some(s.clone()), - Some(AttributeValue::Expression(expr)) => { - evaluate_expression(&expr.value, scope).ok() - } - None => Some(String::new()), - }; - } + if let AttributeContent::Property(prop) = attr + && prop.name == name + { + return match &prop.value { + Some(AttributeValue::Literal(s)) => Some(s.clone()), + Some(AttributeValue::Expression(expr)) => { + evaluate_expression(&expr.value, scope).ok() + } + None => Some(String::new()), + }; } } None @@ -869,10 +871,10 @@ fn transform_children(children: &[Node], ctx: &ProcessingContext) -> Vec { result.extend(transform_children(&nodes, ctx)); } else if let Some(converted) = convert_jsx_to_markdown(element, ctx) { result.extend(converted); - } else if is_intrinsic_jsx_name(name) { - if let Some(preserved) = preserve_intrinsic_flow_element(element, ctx) { - result.extend(preserved); - } + } else if is_intrinsic_jsx_name(name) + && let Some(preserved) = preserve_intrinsic_flow_element(element, ctx) + { + result.extend(preserved); } // Unknown JSX elements are silently skipped } @@ -933,15 +935,15 @@ fn transform_children(children: &[Node], ctx: &ProcessingContext) -> Vec { let simplified = new_children .into_iter() .map(|c| { - if let Node::Text(t) = &c { - if t.value.contains('/') && t.value.contains('.') { - if let Some(basename) = t.value.rsplit('/').next() { - 
return Node::Text(Text { - value: basename.to_string(), - position: t.position.clone(), - }); - } - } + if let Node::Text(t) = &c + && t.value.contains('/') + && t.value.contains('.') + && let Some(basename) = t.value.rsplit('/').next() + { + return Node::Text(Text { + value: basename.to_string(), + position: t.position.clone(), + }); } c }) @@ -1071,10 +1073,10 @@ fn transform_inline_children(children: &[Node], ctx: &ProcessingContext) -> Vec< } } else if let Some(converted) = convert_jsx_text_to_markdown(element, ctx) { result.extend(converted); - } else if is_intrinsic_jsx_name(name) { - if let Some(preserved) = preserve_intrinsic_text_element(element, ctx) { - result.extend(preserved); - } + } else if is_intrinsic_jsx_name(name) + && let Some(preserved) = preserve_intrinsic_text_element(element, ctx) + { + result.extend(preserved); } // Unknown inline JSX elements are silently skipped } diff --git a/libraries/script-runtime/Cargo.toml b/libraries/script-runtime/Cargo.toml index d6b84445..671ee7f9 100644 --- a/libraries/script-runtime/Cargo.toml +++ b/libraries/script-runtime/Cargo.toml @@ -3,6 +3,7 @@ name = "tnmsc-script-runtime" description = "Rust-backed TypeScript proxy runtime validation for tnmsc" version.workspace = true edition.workspace = true +rust-version.workspace = true license.workspace = true authors.workspace = true repository.workspace = true diff --git a/libraries/script-runtime/package.json b/libraries/script-runtime/package.json index cc8fe1d4..b5c6b32a 100644 --- a/libraries/script-runtime/package.json +++ b/libraries/script-runtime/package.json @@ -1,7 +1,7 @@ { "name": "@truenine/script-runtime", "type": "module", - "version": "2026.10330.108", + "version": "2026.10330.118", "private": true, "description": "Rust-backed TypeScript proxy runtime for tnmsc", "license": "AGPL-3.0-only", diff --git a/libraries/script-runtime/src/lib.rs b/libraries/script-runtime/src/lib.rs index 7854d2f6..9aa50cdd 100644 --- 
a/libraries/script-runtime/src/lib.rs +++ b/libraries/script-runtime/src/lib.rs @@ -188,7 +188,7 @@ fn detect_node_command() -> Result { fn build_aindex_public_dir(aindex_dir: &str) -> Result { let normalized = absolute_base_path(aindex_dir)?; - Ok(normalize_path(&normalized.join("public"))?) + normalize_path(&normalized.join("public")) } fn read_pipe_to_string(pipe: &mut Option, label: &str) -> Result { diff --git a/mcp/package.json b/mcp/package.json index b820138f..f30a8807 100644 --- a/mcp/package.json +++ b/mcp/package.json @@ -1,7 +1,7 @@ { "name": "@truenine/memory-sync-mcp", "type": "module", - "version": "2026.10330.108", + "version": "2026.10330.118", "description": "MCP stdio server for managing memory-sync prompt sources and translation artifacts", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/package.json b/package.json index 36c5c462..726b288a 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync", - "version": "2026.10330.108", + "version": "2026.10330.118", "description": "Cross-AI-tool prompt synchronisation toolkit (CLI + Tauri desktop GUI) — one ruleset, multi-target adaptation. 
Monorepo powered by pnpm + Turbo.", "license": "AGPL-3.0-only", "keywords": [ @@ -51,12 +51,12 @@ }, "devEngines": { "node": ">= 25.6.1", - "rust": ">= 1.87.0" + "rust": ">= 1.88.0" }, "simple-git-hooks": { "pre-commit": "pnpm tsx .githooks/sync-versions.ts" }, - "packageManager": "pnpm@10.32.1", + "packageManager": "pnpm@10.33.0", "devDependencies": { "@antfu/eslint-config": "catalog:", "@eslint/js": "catalog:", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 1173986b..167b8017 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1165,8 +1165,8 @@ packages: react: ^18 || ^19 || ^19.0.0-rc react-dom: ^18 || ^19 || ^19.0.0-rc - '@hono/node-server@1.19.11': - resolution: {integrity: sha512-dr8/3zEaB+p0D2n/IUrlPF1HZm586qgJNXK1a9fhg/PzdtkK7Ksd5l312tJX2yBuALqDYBlG20QEbayqPyxn+g==} + '@hono/node-server@1.19.12': + resolution: {integrity: sha512-txsUW4SQ1iilgE0l9/e9VQWmELXifEFvmdA1j6WFh/aFPj99hIntrSsq/if0UWyGVkmrRPKA1wCeP+UCr1B9Uw==} engines: {node: '>=18.14.1'} peerDependencies: hono: ^4 @@ -3322,8 +3322,8 @@ packages: resolution: {integrity: sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==} engines: {node: '>= 0.4'} - caniuse-lite@1.0.30001781: - resolution: {integrity: sha512-RdwNCyMsNBftLjW6w01z8bKEvT6e/5tpPVEgtn22TiLGlstHOVecsX2KHFkD5e/vRnIE4EGzpuIODb3mtswtkw==} + caniuse-lite@1.0.30001782: + resolution: {integrity: sha512-dZcaJLJeDMh4rELYFw1tvSn1bhZWYFOt468FcbHHxx/Z/dFidd1I6ciyFdi3iwfQCyOjqo9upF6lGQYtMiJWxw==} ccount@2.0.1: resolution: {integrity: sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==} @@ -6570,7 +6570,7 @@ snapshots: react-dom: 19.2.4(react@19.2.4) use-sync-external-store: 1.6.0(react@19.2.4) - '@hono/node-server@1.19.11(hono@4.12.9)': + '@hono/node-server@1.19.12(hono@4.12.9)': dependencies: hono: 4.12.9 @@ -6864,7 +6864,7 @@ snapshots: '@modelcontextprotocol/sdk@1.28.0(zod@4.3.6)': dependencies: - '@hono/node-server': 1.19.11(hono@4.12.9) + 
'@hono/node-server': 1.19.12(hono@4.12.9) ajv: 8.18.0 ajv-formats: 3.0.1(ajv@8.18.0) content-type: 1.0.5 @@ -8517,7 +8517,7 @@ snapshots: browserslist@4.28.1: dependencies: baseline-browser-mapping: 2.10.12 - caniuse-lite: 1.0.30001781 + caniuse-lite: 1.0.30001782 electron-to-chromium: 1.5.328 node-releases: 2.0.36 update-browserslist-db: 1.2.3(browserslist@4.28.1) @@ -8538,7 +8538,7 @@ snapshots: call-bind-apply-helpers: 1.0.2 get-intrinsic: 1.3.0 - caniuse-lite@1.0.30001781: {} + caniuse-lite@1.0.30001782: {} ccount@2.0.1: {} @@ -10601,7 +10601,7 @@ snapshots: '@next/env': 16.2.1 '@swc/helpers': 0.5.15 baseline-browser-mapping: 2.10.12 - caniuse-lite: 1.0.30001781 + caniuse-lite: 1.0.30001782 postcss: 8.4.31 react: 19.2.4 react-dom: 19.2.4(react@19.2.4)