diff --git a/Cargo.lock b/Cargo.lock index 732156da..2d8a5ab6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -415,8 +415,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c673075a2e0e5f4a1dde27ce9dee1ea4558c7ffe648f576438a20ca1d2acc4b0" dependencies = [ "iana-time-zone", + "js-sys", "num-traits", "serde", + "wasm-bindgen", "windows-link 0.2.1", ] @@ -1935,6 +1937,27 @@ dependencies = [ "thiserror 1.0.69", ] +[[package]] +name = "json5" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96b0db21af676c1ce64250b5f40f3ce2cf27e4e47cb91ed91eb6fe9350b430c1" +dependencies = [ + "pest", + "pest_derive", + "serde", +] + +[[package]] +name = "json5" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "733a844dbd6fef128e98cb4487b887cb55454d92cd9994b1bafe004fabbe670c" +dependencies = [ + "serde", + "ucd-trie", +] + [[package]] name = "jsonptr" version = "0.6.3" @@ -2147,7 +2170,7 @@ dependencies = [ [[package]] name = "memory-sync-gui" -version = "2026.10408.12323" +version = "2026.10411.10132" dependencies = [ "dirs", "proptest", @@ -2227,6 +2250,8 @@ dependencies = [ "napi-sys", "nohash-hasher", "rustc-hash", + "serde", + "serde_json", ] [[package]] @@ -2612,6 +2637,49 @@ version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" +[[package]] +name = "pest" +version = "2.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0848c601009d37dfa3430c4666e147e49cdcf1b92ecd3e63657d8a5f19da662" +dependencies = [ + "memchr", + "ucd-trie", +] + +[[package]] +name = "pest_derive" +version = "2.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11f486f1ea21e6c10ed15d5a7c77165d0ee443402f0780849d1768e7d9d6fe77" +dependencies = [ + "pest", + "pest_generator", +] + +[[package]] +name = "pest_generator" 
+version = "2.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8040c4647b13b210a963c1ed407c1ff4fdfa01c31d6d2a098218702e6664f94f" +dependencies = [ + "pest", + "pest_meta", + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "pest_meta" +version = "2.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89815c69d36021a140146f26659a81d6c2afa33d216d736dd4be5381a7362220" +dependencies = [ + "pest", + "sha2 0.10.9", +] + [[package]] name = "phf" version = "0.8.0" @@ -3580,6 +3648,7 @@ version = "1.0.149" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" dependencies = [ + "indexmap 2.13.0", "itoa", "memchr", "serde", @@ -4436,18 +4505,23 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tnmsc" -version = "2026.10408.12323" +version = "2026.10411.10132" dependencies = [ + "base64 0.22.1", + "chrono", "clap", "dirs", "globset", + "json5 1.3.1", "napi", "napi-build", "napi-derive", "proptest", + "regex-lite", "reqwest", "serde", "serde_json", + "serde_yml", "sha2 0.11.0", "tempfile", "thiserror 2.0.18", @@ -4458,7 +4532,7 @@ dependencies = [ [[package]] name = "tnmsc-cli-shell" -version = "2026.10408.12323" +version = "2026.10411.10132" dependencies = [ "clap", "serde_json", @@ -4468,7 +4542,7 @@ dependencies = [ [[package]] name = "tnmsc-logger" -version = "2026.10408.12323" +version = "2026.10411.10132" dependencies = [ "napi", "napi-build", @@ -4479,8 +4553,9 @@ dependencies = [ [[package]] name = "tnmsc-md-compiler" -version = "2026.10408.12323" +version = "2026.10411.10132" dependencies = [ + "json5 0.4.1", "markdown", "napi", "napi-build", @@ -4494,7 +4569,7 @@ dependencies = [ [[package]] name = "tnmsc-script-runtime" -version = "2026.10408.12323" +version = "2026.10411.10132" dependencies = [ "napi", "napi-build", @@ -4742,6 +4817,12 @@ 
version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" +[[package]] +name = "ucd-trie" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" + [[package]] name = "unarray" version = "0.1.4" diff --git a/Cargo.toml b/Cargo.toml index 8fcf5f36..55792f10 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,7 +10,7 @@ members = [ ] [workspace.package] -version = "2026.10408.12323" +version = "2026.10411.10132" edition = "2024" rust-version = "1.88" license = "AGPL-3.0-only" @@ -26,7 +26,7 @@ tnmsc-script-runtime = { path = "libraries/script-runtime" } # Serialization serde = { version = "1.0.228", features = ["derive"] } -serde_json = "1.0.149" +serde_json = { version = "1.0.149", features = ["preserve_order"] } serde_yml = "0.0.12" # CLI @@ -47,7 +47,7 @@ reqwest = { version = "0.13.2", features = ["blocking", "json"] } markdown = "1.0.0" # NAPI-RS (Node.js native addon bindings) -napi = { version = "3.8.4", features = ["napi4"] } +napi = { version = "3.8.4", features = ["napi4", "serde-json"] } napi-derive = "3.5.3" napi-build = "2.3.1" diff --git a/cli/npm/darwin-arm64/package.json b/cli/npm/darwin-arm64/package.json index 56a56d82..530e7e37 100644 --- a/cli/npm/darwin-arm64/package.json +++ b/cli/npm/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-darwin-arm64", - "version": "2026.10408.12323", + "version": "2026.10411.10132", "os": [ "darwin" ], diff --git a/cli/npm/darwin-x64/package.json b/cli/npm/darwin-x64/package.json index ad8e5716..82291e76 100644 --- a/cli/npm/darwin-x64/package.json +++ b/cli/npm/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-darwin-x64", - "version": "2026.10408.12323", + "version": "2026.10411.10132", "os": [ "darwin" ], diff --git 
a/cli/npm/linux-arm64-gnu/package.json b/cli/npm/linux-arm64-gnu/package.json index 0d416e00..175f7a1a 100644 --- a/cli/npm/linux-arm64-gnu/package.json +++ b/cli/npm/linux-arm64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-linux-arm64-gnu", - "version": "2026.10408.12323", + "version": "2026.10411.10132", "os": [ "linux" ], diff --git a/cli/npm/linux-x64-gnu/package.json b/cli/npm/linux-x64-gnu/package.json index 15d50515..1f8b96b0 100644 --- a/cli/npm/linux-x64-gnu/package.json +++ b/cli/npm/linux-x64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-linux-x64-gnu", - "version": "2026.10408.12323", + "version": "2026.10411.10132", "os": [ "linux" ], diff --git a/cli/npm/win32-x64-msvc/package.json b/cli/npm/win32-x64-msvc/package.json index 37f75020..9aceef1f 100644 --- a/cli/npm/win32-x64-msvc/package.json +++ b/cli/npm/win32-x64-msvc/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-win32-x64-msvc", - "version": "2026.10408.12323", + "version": "2026.10411.10132", "os": [ "win32" ], diff --git a/cli/package.json b/cli/package.json index 4a0aea5e..e1ad9d19 100644 --- a/cli/package.json +++ b/cli/package.json @@ -1,7 +1,7 @@ { "name": "@truenine/memory-sync-cli", "type": "module", - "version": "2026.10408.12323", + "version": "2026.10411.10132", "description": "TrueNine Memory Synchronization CLI shell", "author": "TrueNine", "license": "AGPL-3.0-only", @@ -16,10 +16,6 @@ "types": "./dist/index.d.mts", "import": "./dist/index.mjs" }, - "./globals": { - "types": "./dist/globals.d.mts", - "import": "./dist/globals.mjs" - }, "./schema.json": "./dist/tnmsc.schema.json", "./package.json": "./package.json" }, @@ -59,17 +55,21 @@ "sync:sdk-assets": "tsx scripts/sync-sdk-dist.ts", "check": "run-s ensure:sdk-build check:run", "check:run": "run-p lint:run check:type:run", - "lint": "run-s ensure:sdk-build lint:run", + "lint": "run-s ensure:sdk-build lint:run lint:rust", "lint:run": "eslint --cache 
--cache-location .eslintcache .", + "lint:rust": "cargo fmt --check --manifest-path Cargo.toml", "prepublishOnly": "run-s build check", "test": "run-s ensure:sdk-build test:run", "test:run": "vitest run", - "lint:fix": "run-s ensure:sdk-build lint:fix:run", + "lint:fix": "run-s ensure:sdk-build lint:fix:run lint:fix:rust", "lint:fix:run": "eslint --fix --cache --cache-location .eslintcache .", + "lint:fix:rust": "cargo fmt --manifest-path Cargo.toml", "typecheck": "run-s ensure:sdk-build check:type:run", "check:type:run": "tsc --noEmit -p tsconfig.lib.json" }, - "dependencies": {}, + "dependencies": { + "jiti": "catalog:" + }, "optionalDependencies": { "@truenine/memory-sync-cli-darwin-arm64": "workspace:*", "@truenine/memory-sync-cli-darwin-x64": "workspace:*", @@ -83,6 +83,7 @@ }, "devDependencies": { "@truenine/eslint10-config": "catalog:", + "@truenine/logger": "workspace:*", "@truenine/memory-sync-sdk": "workspace:*", "@types/node": "catalog:", "@vitest/coverage-v8": "catalog:", diff --git a/cli/scripts/ensure-sdk-build.ts b/cli/scripts/ensure-sdk-build.ts index 444d4882..e7dcf716 100644 --- a/cli/scripts/ensure-sdk-build.ts +++ b/cli/scripts/ensure-sdk-build.ts @@ -10,7 +10,7 @@ const cliDir = resolve(__dirname, ".."); const workspaceDir = resolve(cliDir, ".."); const sdkDistDir = resolve(cliDir, "../sdk/dist"); -const REQUIRED_SDK_OUTPUTS = ["index.mjs", "index.d.mts", "globals.mjs", "globals.d.mts", "tnmsc.schema.json"] as const; +const REQUIRED_SDK_OUTPUTS = ["index.mjs", "index.d.mts", "tnmsc.schema.json"] as const; function hasRequiredSdkOutputs(): boolean { return REQUIRED_SDK_OUTPUTS.every((fileName) => existsSync(resolve(sdkDistDir, fileName))); diff --git a/cli/scripts/sync-sdk-dist.ts b/cli/scripts/sync-sdk-dist.ts index 3bb729c3..bbc3e612 100644 --- a/cli/scripts/sync-sdk-dist.ts +++ b/cli/scripts/sync-sdk-dist.ts @@ -116,7 +116,7 @@ function smokeTestScriptRuntimeWorker(): void { cwd: tempDir, workspaceDir: tempDir, aindexDir: join(tempDir, 
'.aindex'), - command: 'execute', + command: 'install', platform: process.platform }), 'utf8' diff --git a/cli/src/PluginPipeline.ts b/cli/src/PluginPipeline.ts deleted file mode 100644 index 94dd8106..00000000 --- a/cli/src/PluginPipeline.ts +++ /dev/null @@ -1,111 +0,0 @@ -import type { - ILogger, - OutputCleanContext, - OutputCollectedContext, - OutputPlugin, - OutputRuntimeTargets, - OutputWriteContext, - PipelineConfig, - PluginOptions -} from '@truenine/memory-sync-sdk' -import type { - Command, - CommandContext, - CommandResult -} from '@/commands/Command' -import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' -import { - createLogger, - discoverOutputRuntimeTargets, - setGlobalLogLevel -} from '@truenine/memory-sync-sdk' -import { - extractUserArgs, - parseArgs, - resolveCommand -} from '@/pipeline/CliArgumentParser' - -export class PluginPipeline { - private readonly logger: ILogger - readonly args: ParsedCliArgs - private outputPlugins: OutputPlugin[] = [] - private runtimeTargets?: OutputRuntimeTargets - - constructor(...cmdArgs: (string | undefined)[]) { - const filtered = cmdArgs.filter((arg): arg is string => arg != null) - this.args = parseArgs(extractUserArgs(filtered)) - if (this.args.logLevel != null) setGlobalLogLevel(this.args.logLevel) - this.logger = createLogger('PluginPipeline', this.args.logLevel) - this.logger.debug('initialized', {args: this.args}) - } - - registerOutputPlugins(plugins: OutputPlugin[]): this { - this.outputPlugins.push(...plugins) - return this - } - - async run(config: PipelineConfig): Promise { - const {context, outputPlugins, userConfigOptions, executionPlan} = config - this.registerOutputPlugins([...outputPlugins]) - const command: Command = resolveCommand(this.args) - return command.execute( - this.createCommandContext(context, userConfigOptions, executionPlan) - ) - } - - private createCommandContext( - ctx: OutputCollectedContext, - userConfigOptions: Required, - executionPlan: 
PipelineConfig['executionPlan'] - ): CommandContext { - return { - logger: this.logger, - outputPlugins: this.outputPlugins, - collectedOutputContext: ctx, - userConfigOptions, - executionPlan, - createCleanContext: dryRun => - this.createCleanContext(ctx, userConfigOptions, executionPlan, dryRun), - createWriteContext: dryRun => - this.createWriteContext(ctx, userConfigOptions, executionPlan, dryRun) - } - } - - private createCleanContext( - ctx: OutputCollectedContext, - userConfigOptions: Required, - executionPlan: PipelineConfig['executionPlan'], - dryRun: boolean - ): OutputCleanContext { - return { - logger: this.logger, - collectedOutputContext: ctx, - pluginOptions: userConfigOptions, - runtimeTargets: this.getRuntimeTargets(), - executionPlan, - dryRun - } - } - - private createWriteContext( - ctx: OutputCollectedContext, - userConfigOptions: Required, - executionPlan: PipelineConfig['executionPlan'], - dryRun: boolean - ): OutputWriteContext { - return { - logger: this.logger, - collectedOutputContext: ctx, - pluginOptions: userConfigOptions, - runtimeTargets: this.getRuntimeTargets(), - executionPlan, - dryRun, - registeredPluginNames: this.outputPlugins.map(plugin => plugin.name) - } - } - - private getRuntimeTargets(): OutputRuntimeTargets { - this.runtimeTargets ??= discoverOutputRuntimeTargets(this.logger) - return this.runtimeTargets - } -} diff --git a/cli/src/pipeline/CliArgumentParser.ts b/cli/src/cli-args.ts similarity index 66% rename from cli/src/pipeline/CliArgumentParser.ts rename to cli/src/cli-args.ts index 61108a6d..2df820f8 100644 --- a/cli/src/pipeline/CliArgumentParser.ts +++ b/cli/src/cli-args.ts @@ -1,17 +1,7 @@ -import type {Command} from '@/commands/Command' -import {FactoryPriority} from '@/commands/CommandFactory' -import {CommandRegistry} from '@/commands/CommandRegistry' -import {CleanCommandFactory} from '@/commands/factories/CleanCommandFactory' -import {DryRunCommandFactory} from '@/commands/factories/DryRunCommandFactory' 
-import {ExecuteCommandFactory} from '@/commands/factories/ExecuteCommandFactory' -import {HelpCommandFactory} from '@/commands/factories/HelpCommandFactory' -import {PluginsCommandFactory} from '@/commands/factories/PluginsCommandFactory' -import {UnknownCommandFactory} from '@/commands/factories/UnknownCommandFactory' -import {VersionCommandFactory} from '@/commands/factories/VersionCommandFactory' - export type Subcommand = | 'help' | 'version' + | 'install' | 'dry-run' | 'clean' | 'plugins' @@ -31,6 +21,7 @@ export interface ParsedCliArgs { const VALID_SUBCOMMANDS: ReadonlySet = new Set([ 'help', 'version', + 'install', 'dry-run', 'clean', 'plugins' @@ -59,26 +50,22 @@ export function extractUserArgs(argv: readonly string[]): string[] { return args } +const RUNTIME_REGEXES: readonly RegExp[] = [ + 'node', + 'nodejs', + 'bun', + 'deno', + 'tsx', + 'ts-node', + 'npx', + 'pnpx', + 'yarn', + 'pnpm' +].map(runtime => new RegExp(`(?:^|/)${runtime}(?:\\.exe|\\.cmd|\\.ps1)?$`, 'i')) + function isRuntimeExecutable(arg: string): boolean { - const runtimes = [ - 'node', - 'nodejs', - 'bun', - 'deno', - 'tsx', - 'ts-node', - 'npx', - 'pnpx', - 'yarn', - 'pnpm' - ] const normalized = arg.toLowerCase().replaceAll('\\', '/') - return runtimes.some( - runtime => - new RegExp(`(?:^|/)${runtime}(?:\\.exe|\\.cmd|\\.ps1)?$`, 'i').test( - normalized - ) || normalized === runtime - ) + return RUNTIME_REGEXES.some(regex => regex.test(normalized)) } function isScriptOrPackage(arg: string): boolean { @@ -185,38 +172,3 @@ export function parseArgs(args: readonly string[]): ParsedCliArgs { return result } - -let commandRegistry: CommandRegistry | undefined - -function createDefaultCommandRegistry(): CommandRegistry { - const registry = new CommandRegistry() - registry.register(new VersionCommandFactory()) - registry.register(new HelpCommandFactory()) - registry.register(new UnknownCommandFactory()) - registry.registerWithPriority( - new DryRunCommandFactory(), - 
FactoryPriority.Subcommand - ) - registry.registerWithPriority( - new CleanCommandFactory(), - FactoryPriority.Subcommand - ) - registry.registerWithPriority( - new PluginsCommandFactory(), - FactoryPriority.Subcommand - ) - registry.registerWithPriority( - new ExecuteCommandFactory(), - FactoryPriority.Subcommand - ) - return registry -} - -function getCommandRegistry(): CommandRegistry { - commandRegistry ??= createDefaultCommandRegistry() - return commandRegistry -} - -export function resolveCommand(args: ParsedCliArgs): Command { - return getCommandRegistry().resolve(args) -} diff --git a/cli/src/cli-runtime.test.ts b/cli/src/cli-runtime.test.ts index 8a534b84..4cbc628b 100644 --- a/cli/src/cli-runtime.test.ts +++ b/cli/src/cli-runtime.test.ts @@ -1,87 +1,74 @@ import {afterEach, describe, expect, it, vi} from 'vitest' -const { - createDefaultPluginConfigMock, - pipelineRunMock, - pluginPipelineCtorMock -} = vi.hoisted(() => ({ - createDefaultPluginConfigMock: vi.fn(), - pipelineRunMock: vi.fn(), - pluginPipelineCtorMock: vi.fn() +const {cleanMock, dryRunMock, installMock, listAdaptorsMock} = vi.hoisted(() => ({ + cleanMock: vi.fn(), + dryRunMock: vi.fn(), + installMock: vi.fn(), + listAdaptorsMock: vi.fn() })) -function createEmptyProjectsBySeries() { - return { - app: [], - ext: [], - arch: [], - softwares: [] - } -} - -vi.mock('./plugin.config', () => ({ - createDefaultPluginConfig: createDefaultPluginConfigMock -})) - -vi.mock('./PluginPipeline', () => ({ - PluginPipeline: function MockPluginPipeline(...args: unknown[]) { - pluginPipelineCtorMock(...args) - return {run: pipelineRunMock} +vi.mock('@truenine/memory-sync-sdk', () => ({ + getMemorySyncSdkBinding() { + return { + install: installMock, + dryRun: dryRunMock, + clean: cleanMock, + listAdaptors: listAdaptorsMock + } + }, + createTsFallbackMemorySyncBinding() { + return { + install: installMock, + dryRun: dryRunMock, + clean: cleanMock + } } })) afterEach(() => { vi.clearAllMocks() vi.resetModules() 
+ delete process.env['TNMSC_DISABLE_NATIVE_COMMAND_BINDING'] + delete process.env['TNMSC_DISABLE_NATIVE_BINDING'] }) describe('cli runtime lightweight commands', () => { - it('does not load plugin config for --version', async () => { + it('does not force-disable native command binding', async () => { + delete process.env['TNMSC_DISABLE_NATIVE_COMMAND_BINDING'] + delete process.env['TNMSC_DISABLE_NATIVE_BINDING'] + + await import('./cli-runtime') + + expect(process.env['TNMSC_DISABLE_NATIVE_COMMAND_BINDING']).toBeUndefined() + expect(process.env['TNMSC_DISABLE_NATIVE_BINDING']).toBeUndefined() + }) + + it('does not touch the sdk binding for --version', async () => { const {runCli} = await import('./cli-runtime') const exitCode = await runCli(['node', 'tnmsc', '--version']) expect(exitCode).toBe(0) - expect(createDefaultPluginConfigMock).not.toHaveBeenCalled() - expect(pluginPipelineCtorMock).not.toHaveBeenCalled() - expect(pipelineRunMock).not.toHaveBeenCalled() + expect(installMock).not.toHaveBeenCalled() + expect(dryRunMock).not.toHaveBeenCalled() + expect(cleanMock).not.toHaveBeenCalled() }) - it('passes the real cwd into the standard plugin config path', async () => { + it('passes the real cwd into the sdk install path', async () => { const {runCli} = await import('./cli-runtime') - createDefaultPluginConfigMock.mockResolvedValue({ - context: { - workspace: { - directory: { - pathKind: 'absolute', - path: process.cwd(), - getDirectoryName: () => 'cwd' - }, - projects: [] - } - }, - outputPlugins: [], - userConfigOptions: {}, - executionPlan: { - scope: 'workspace', - cwd: process.cwd(), - workspaceDir: process.cwd(), - projectsBySeries: createEmptyProjectsBySeries() - } - }) - pipelineRunMock.mockResolvedValue({ + installMock.mockResolvedValue({ success: true, filesAffected: 0, - dirsAffected: 0 + dirsAffected: 0, + warnings: [], + errors: [] }) const exitCode = await runCli(['node', 'tnmsc']) expect(exitCode).toBe(0) - 
expect(createDefaultPluginConfigMock).toHaveBeenCalledWith( - ['node', 'tnmsc'], - void 0, - process.cwd() - ) - expect(pluginPipelineCtorMock).toHaveBeenCalledWith('node', 'tnmsc') - expect(pipelineRunMock).toHaveBeenCalledTimes(1) + expect(installMock).toHaveBeenCalledWith({ + cwd: process.cwd() + }) + expect(dryRunMock).not.toHaveBeenCalled() + expect(cleanMock).not.toHaveBeenCalled() }) }) diff --git a/cli/src/cli-runtime.ts b/cli/src/cli-runtime.ts index 6825e7a6..f351adf2 100644 --- a/cli/src/cli-runtime.ts +++ b/cli/src/cli-runtime.ts @@ -1,120 +1,140 @@ -import type { - Command, - CommandContext, - CommandResult -} from '@/commands/Command' -import * as path from 'node:path' +import type {MemorySyncAdaptorInfo, MemorySyncCommandResult} from '@truenine/memory-sync-sdk' + import process from 'node:process' -import { - buildUnhandledExceptionDiagnostic, - createLogger, - FilePathKind, - flushOutput, - mergeConfig, - setGlobalLogLevel -} from '@truenine/memory-sync-sdk' -import { - extractUserArgs, - parseArgs, - resolveCommand -} from '@/pipeline/CliArgumentParser' -import {PluginPipeline} from '@/PluginPipeline' -import {createDefaultPluginConfig} from './plugin.config' - -const LIGHTWEIGHT_COMMAND_NAMES = new Set(['help', 'version', 'unknown']) - -function createEmptyProjectsBySeries(): { - readonly app: readonly never[] - readonly ext: readonly never[] - readonly arch: readonly never[] - readonly softwares: readonly never[] -} { - return { - app: [], - ext: [], - arch: [], - softwares: [] - } as const -} +import {flushOutput, setGlobalLogLevel} from '@truenine/logger' +import {createTsFallbackMemorySyncBinding, getMemorySyncSdkBinding} from '@truenine/memory-sync-sdk' +import {extractUserArgs, parseArgs} from './cli-args' -function createUnavailableContext(kind: 'cleanup' | 'write'): never { - throw new Error(`${kind} context is unavailable for lightweight commands`) +const CLI_NAME = 'tnmsc' + +export function getCliVersion(): string { + return typeof 
__CLI_VERSION__ !== 'undefined' ? __CLI_VERSION__ : 'dev' } -function createLightweightCommandContext( - logLevel: ReturnType['logLevel'] -): CommandContext { - const cwd = process.cwd() - const workspaceDir = cwd - const userConfigOptions = mergeConfig({ - workspaceDir, - ...logLevel != null ? {logLevel} : {} - }) - return { - logger: createLogger('PluginPipeline', logLevel), - outputPlugins: [], - collectedOutputContext: { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir) - }, - projects: [] - } - }, - userConfigOptions, - executionPlan: { - scope: 'workspace', - cwd, - workspaceDir, - projectsBySeries: createEmptyProjectsBySeries() - }, - createCleanContext: () => createUnavailableContext('cleanup'), - createWriteContext: () => createUnavailableContext('write') - } +function toErrorMessage(error: unknown): string { + return error instanceof Error ? error.message : String(error) } -function resolveLightweightCommand( - argv: readonly string[] -): {readonly command: Command, readonly context: CommandContext} | undefined { - const parsedArgs = parseArgs( - extractUserArgs(argv.filter((arg): arg is string => arg != null)) +function writeHelp(): void { + process.stdout.write( + `${` +# ${CLI_NAME} v${getCliVersion()} + +Synchronize AI memory and configuration files across projects. + +## Usage + +- \`${CLI_NAME}\` runs the default install pipeline. +- \`${CLI_NAME} help\` shows this help message. +- \`${CLI_NAME} version\` shows the CLI version. +- \`${CLI_NAME} install\` runs the install pipeline explicitly. +- \`${CLI_NAME} dry-run\` previews what would be written. +- \`${CLI_NAME} clean\` removes generated files. +- \`${CLI_NAME} clean --dry-run\` previews what would be cleaned. +- \`${CLI_NAME} plugins\` lists the built-in output plugins. + +## Log Controls + +- \`--trace\` shows the most detail. +- \`--debug\` shows debug detail. 
+- \`--info\` shows key progress and results. +- \`--warn\` shows warnings only. +- \`--error\` shows errors only. + +## Configuration + +- Global user config: \`~/.aindex/.tnmsc.json\` +- Runtime core: \`@truenine/memory-sync-sdk\` +`.trim()}\n` ) - const command: Command = resolveCommand(parsedArgs) - if (!LIGHTWEIGHT_COMMAND_NAMES.has(command.name)) return void 0 - if (parsedArgs.logLevel != null) setGlobalLogLevel(parsedArgs.logLevel) - return { - command, - context: createLightweightCommandContext(parsedArgs.logLevel) +} + +function writeVersion(): void { + process.stdout.write(`# ${CLI_NAME} v${getCliVersion()}\n`) +} + +function writeUnknownCommand(command: string): void { + process.stderr.write(`Unknown command: ${command}\nRun \`${CLI_NAME} help\` for supported commands.\n`) +} + +function writePluginList(plugins: readonly MemorySyncAdaptorInfo[]): void { + const lines = ['# Registered plugins', ''] + if (plugins.length === 0) { + lines.push('- No plugins are currently registered.') + } else { + for (const plugin of plugins) { + const dependencySuffix = plugin.dependencies.length > 0 ? 
` (depends on: ${plugin.dependencies.join(', ')})` : '' + lines.push(`- ${plugin.name}${dependencySuffix}`) + } } + process.stdout.write(`${lines.join('\n')}\n`) } -export async function runCli( - argv: readonly string[] = process.argv -): Promise { +export async function runCli(argv: readonly string[] = process.argv): Promise { try { - const lightweightCommand = resolveLightweightCommand(argv) - if (lightweightCommand != null) { - const result: CommandResult = await lightweightCommand.command.execute( - lightweightCommand.context - ) + const parsedArgs = parseArgs(extractUserArgs(argv)) + + if (parsedArgs.logLevel != null) setGlobalLogLevel(parsedArgs.logLevel) + + if (parsedArgs.helpFlag || parsedArgs.subcommand === 'help') { + writeHelp() + flushOutput() + return 0 + } + + if (parsedArgs.versionFlag || parsedArgs.subcommand === 'version') { + writeVersion() flushOutput() - return result.success ? 0 : 1 + return 0 + } + + if (parsedArgs.unknownCommand != null) { + writeUnknownCommand(parsedArgs.unknownCommand) + flushOutput() + return 1 + } + + const nativeBinding = getMemorySyncSdkBinding() + const fallbackBinding = createTsFallbackMemorySyncBinding() + // Pipeline commands (install / dry-run / clean) are not yet fully + // implemented in Rust, so use the mature TS fallback for them while + // keeping the native binding for prompts and listAdaptors. + const binding = { + ...nativeBinding, + install: fallbackBinding.install, + dryRun: fallbackBinding.dryRun, + clean: fallbackBinding.clean + } + const commandOptions = { + cwd: process.cwd(), + ...parsedArgs.logLevel != null ? 
{logLevel: parsedArgs.logLevel} : {} + } as const + + let result: MemorySyncCommandResult + switch (parsedArgs.subcommand) { + case 'plugins': { + const plugins = await binding.listAdaptors() + writePluginList(plugins) + flushOutput() + return 0 + } + case 'dry-run': + result = await binding.dryRun(commandOptions) + break + case 'clean': + result = await binding.clean({ + ...commandOptions, + dryRun: parsedArgs.dryRun + }) + break + default: + result = await binding.install(commandOptions) } - const pipeline = new PluginPipeline(...argv) - const userPluginConfig = await createDefaultPluginConfig( - argv, - void 0, - process.cwd() - ) - const result = await pipeline.run(userPluginConfig) flushOutput() return result.success ? 0 : 1 } catch (error) { - const logger = createLogger('main', 'error') - logger.error(buildUnhandledExceptionDiagnostic('main', error)) + process.stderr.write(`[${CLI_NAME}] ${toErrorMessage(error)}\n`) flushOutput() return 1 } diff --git a/cli/src/cli.rs b/cli/src/cli.rs index 08c8092e..5a21098e 100644 --- a/cli/src/cli.rs +++ b/cli/src/cli.rs @@ -13,172 +13,182 @@ use clap::{Args, Parser, Subcommand}; disable_help_subcommand = true, )] pub struct Cli { - #[command(subcommand)] - pub command: Option, + #[command(subcommand)] + pub command: Option, - /// Set log level to trace (most verbose) - #[arg(long = "trace", global = true)] - pub trace: bool, + /// Set log level to trace (most verbose) + #[arg(long = "trace", global = true)] + pub trace: bool, - /// Set log level to debug - #[arg(long = "debug", global = true)] - pub debug: bool, + /// Set log level to debug + #[arg(long = "debug", global = true)] + pub debug: bool, - /// Set log level to info - #[arg(long = "info", global = true)] - pub info: bool, + /// Set log level to info + #[arg(long = "info", global = true)] + pub info: bool, - /// Set log level to warn - #[arg(long = "warn", global = true)] - pub warn: bool, + /// Set log level to warn + #[arg(long = "warn", global = true)] + pub 
warn: bool, - /// Set log level to error - #[arg(long = "error", global = true)] - pub error: bool, + /// Set log level to error + #[arg(long = "error", global = true)] + pub error: bool, } #[derive(Subcommand, Debug)] pub enum CliCommand { - /// Show help message - Help, + /// Show help message + Help, - /// Show version information - Version, + /// Show version information + Version, - /// Preview changes without writing files - #[command(name = "dry-run")] - DryRun, + /// Run the install pipeline + Install, - /// Remove all generated output files and directories - Clean(CleanArgs), + /// Preview changes without writing files + #[command(name = "dry-run")] + DryRun, - /// List all registered plugins - Plugins, + /// Remove all generated output files and directories + Clean(CleanArgs), + + /// List all registered plugins + Plugins, } #[derive(Args, Debug)] pub struct CleanArgs { - /// Preview cleanup without removing files - #[arg(short = 'n', long = "dry-run")] - pub dry_run: bool, + /// Preview cleanup without removing files + #[arg(short = 'n', long = "dry-run")] + pub dry_run: bool, } /// Resolved log level from CLI flags. /// When multiple flags are provided, the most verbose wins. 
#[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum ResolvedLogLevel { - Trace, - Debug, - Info, - Warn, - Error, + Trace, + Debug, + Info, + Warn, + Error, } impl ResolvedLogLevel { - fn priority(self) -> u8 { - match self { - Self::Trace => 0, - Self::Debug => 1, - Self::Info => 2, - Self::Warn => 3, - Self::Error => 4, - } + fn priority(self) -> u8 { + match self { + Self::Trace => 0, + Self::Debug => 1, + Self::Info => 2, + Self::Warn => 3, + Self::Error => 4, } - - #[allow(dead_code)] - pub fn as_str(self) -> &'static str { - match self { - Self::Trace => "trace", - Self::Debug => "debug", - Self::Info => "info", - Self::Warn => "warn", - Self::Error => "error", - } + } + + #[allow(dead_code)] + pub fn as_str(self) -> &'static str { + match self { + Self::Trace => "trace", + Self::Debug => "debug", + Self::Info => "info", + Self::Warn => "warn", + Self::Error => "error", } - - pub fn to_logger_level(self) -> tnmsc_logger::LogLevel { - match self { - Self::Trace => tnmsc_logger::LogLevel::Trace, - Self::Debug => tnmsc_logger::LogLevel::Debug, - Self::Info => tnmsc_logger::LogLevel::Info, - Self::Warn => tnmsc_logger::LogLevel::Warn, - Self::Error => tnmsc_logger::LogLevel::Error, - } + } + + pub fn to_logger_level(self) -> tnmsc_logger::LogLevel { + match self { + Self::Trace => tnmsc_logger::LogLevel::Trace, + Self::Debug => tnmsc_logger::LogLevel::Debug, + Self::Info => tnmsc_logger::LogLevel::Info, + Self::Warn => tnmsc_logger::LogLevel::Warn, + Self::Error => tnmsc_logger::LogLevel::Error, } + } } /// Resolve log level from CLI flags. /// When multiple flags are set, the most verbose (lowest priority number) wins. 
pub fn resolve_log_level(cli: &Cli) -> Option { - let mut levels = Vec::new(); - if cli.trace { - levels.push(ResolvedLogLevel::Trace); - } - if cli.debug { - levels.push(ResolvedLogLevel::Debug); - } - if cli.info { - levels.push(ResolvedLogLevel::Info); - } - if cli.warn { - levels.push(ResolvedLogLevel::Warn); - } - if cli.error { - levels.push(ResolvedLogLevel::Error); - } - - if levels.is_empty() { - return None; - } - - levels.into_iter().min_by_key(|l| l.priority()) + let mut levels = Vec::new(); + if cli.trace { + levels.push(ResolvedLogLevel::Trace); + } + if cli.debug { + levels.push(ResolvedLogLevel::Debug); + } + if cli.info { + levels.push(ResolvedLogLevel::Info); + } + if cli.warn { + levels.push(ResolvedLogLevel::Warn); + } + if cli.error { + levels.push(ResolvedLogLevel::Error); + } + + if levels.is_empty() { + return None; + } + + levels.into_iter().min_by_key(|l| l.priority()) } /// Resolved command after processing CLI args. /// Maps clap subcommands to the internal command enum used by the runner. #[derive(Debug, Clone, PartialEq, Eq)] pub enum ResolvedCommand { - Help, - Version, - Execute, - DryRun, - Clean, - DryRunClean, - Plugins, + Help, + Version, + Install, + DryRun, + Clean, + DryRunClean, + Plugins, } /// Resolve the command to execute from parsed CLI args. 
pub fn resolve_command(cli: &Cli) -> ResolvedCommand { - match &cli.command { - None => ResolvedCommand::Execute, - Some(CliCommand::Help) => ResolvedCommand::Help, - Some(CliCommand::Version) => ResolvedCommand::Version, - Some(CliCommand::DryRun) => ResolvedCommand::DryRun, - Some(CliCommand::Clean(args)) => { - if args.dry_run { - ResolvedCommand::DryRunClean - } else { - ResolvedCommand::Clean - } - } - Some(CliCommand::Plugins) => ResolvedCommand::Plugins, + match &cli.command { + None => ResolvedCommand::Install, + Some(CliCommand::Help) => ResolvedCommand::Help, + Some(CliCommand::Version) => ResolvedCommand::Version, + Some(CliCommand::Install) => ResolvedCommand::Install, + Some(CliCommand::DryRun) => ResolvedCommand::DryRun, + Some(CliCommand::Clean(args)) => { + if args.dry_run { + ResolvedCommand::DryRunClean + } else { + ResolvedCommand::Clean + } } + Some(CliCommand::Plugins) => ResolvedCommand::Plugins, + } } #[cfg(test)] mod tests { - use super::*; - use clap::Parser; - - #[test] - fn resolve_command_defaults_to_execute() { - let cli = Cli::parse_from(["tnmsc"]); - assert_eq!(resolve_command(&cli), ResolvedCommand::Execute); - } - - #[test] - fn resolve_command_parses_clean_dry_run() { - let cli = Cli::parse_from(["tnmsc", "clean", "--dry-run"]); - assert_eq!(resolve_command(&cli), ResolvedCommand::DryRunClean); - } + use super::*; + use clap::Parser; + + #[test] + fn resolve_command_defaults_to_install() { + let cli = Cli::parse_from(["tnmsc"]); + assert_eq!(resolve_command(&cli), ResolvedCommand::Install); + } + + #[test] + fn resolve_command_parses_install() { + let cli = Cli::parse_from(["tnmsc", "install"]); + assert_eq!(resolve_command(&cli), ResolvedCommand::Install); + } + + #[test] + fn resolve_command_parses_clean_dry_run() { + let cli = Cli::parse_from(["tnmsc", "clean", "--dry-run"]); + assert_eq!(resolve_command(&cli), ResolvedCommand::DryRunClean); + } } diff --git a/cli/src/commands/CleanCommand.ts b/cli/src/commands/CleanCommand.ts 
deleted file mode 100644 index c6bcb2ba..00000000 --- a/cli/src/commands/CleanCommand.ts +++ /dev/null @@ -1,43 +0,0 @@ -import type {Command, CommandContext, CommandResult} from './Command' -import {performCleanup} from '@truenine/memory-sync-sdk' -import {runExecutionPreflight} from './execution-preflight' - -export class CleanCommand implements Command { - readonly name = 'clean' - - async execute(ctx: CommandContext): Promise { - const preflightResult = runExecutionPreflight(ctx, this.name) - if (preflightResult != null) return preflightResult - - const {logger, outputPlugins, createCleanContext, collectedOutputContext} = ctx - logger.info('started', { - command: 'clean', - pluginCount: outputPlugins.length, - projectCount: collectedOutputContext.workspace.projects.length, - workspaceDir: collectedOutputContext.workspace.directory.path - }) - logger.info('clean phase started', {phase: 'cleanup'}) - const result = await performCleanup(outputPlugins, createCleanContext(false), logger) - if (result.violations.length > 0 || result.conflicts.length > 0) { - logger.info('clean halted', { - phase: 'cleanup', - conflicts: result.conflicts.length, - violations: result.violations.length, - ...result.message != null ? {message: result.message} : {} - }) - return {success: false, filesAffected: 0, dirsAffected: 0, ...result.message != null ? 
{message: result.message} : {}} - } - logger.info('clean phase complete', { - phase: 'cleanup', - deletedFiles: result.deletedFiles, - deletedDirs: result.deletedDirs, - errors: result.errors.length - }) - logger.info('complete', { - command: 'clean', - filesAffected: result.deletedFiles, - dirsAffected: result.deletedDirs - }) - return {success: true, filesAffected: result.deletedFiles, dirsAffected: result.deletedDirs} - } -} diff --git a/cli/src/commands/Command.ts b/cli/src/commands/Command.ts deleted file mode 100644 index 866c5d62..00000000 --- a/cli/src/commands/Command.ts +++ /dev/null @@ -1,47 +0,0 @@ -import type { - ExecutionPlan, - ILogger, - OutputCleanContext, - OutputCollectedContext, - OutputPlugin, - OutputWriteContext, - PluginOptions -} from '@truenine/memory-sync-sdk' - -export interface CommandContext { - readonly logger: ILogger - readonly outputPlugins: readonly OutputPlugin[] - readonly collectedOutputContext: OutputCollectedContext - readonly userConfigOptions: Required - readonly executionPlan: ExecutionPlan - readonly createCleanContext: (dryRun: boolean) => OutputCleanContext - readonly createWriteContext: (dryRun: boolean) => OutputWriteContext -} - -export interface CommandResult { - readonly success: boolean - readonly filesAffected: number - readonly dirsAffected: number - readonly message?: string -} - -export interface PluginExecutionResult { - readonly pluginName: string - readonly kind: 'Input' | 'Output' - readonly status: 'success' | 'failed' | 'skipped' - readonly filesWritten?: number - readonly error?: string - readonly duration?: number -} - -export interface JsonPluginInfo { - readonly name: string - readonly kind: 'Input' | 'Output' - readonly description: string - readonly dependencies: readonly string[] -} - -export interface Command { - readonly name: string - execute: (ctx: CommandContext) => Promise -} diff --git a/cli/src/commands/CommandFactory.ts b/cli/src/commands/CommandFactory.ts deleted file mode 100644 index 
27acf7d8..00000000 --- a/cli/src/commands/CommandFactory.ts +++ /dev/null @@ -1,17 +0,0 @@ -import type {Command} from './Command' -import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' - -export interface CommandFactory { - canHandle: (args: ParsedCliArgs) => boolean - createCommand: (args: ParsedCliArgs) => Command -} - -export enum FactoryPriority { - Flags = 0, - Unknown = 1, - Subcommand = 2 -} - -export interface PrioritizedCommandFactory extends CommandFactory { - readonly priority: FactoryPriority -} diff --git a/cli/src/commands/CommandRegistry.ts b/cli/src/commands/CommandRegistry.ts deleted file mode 100644 index 736055bb..00000000 --- a/cli/src/commands/CommandRegistry.ts +++ /dev/null @@ -1,39 +0,0 @@ -import type {Command} from './Command' -import type {CommandFactory, PrioritizedCommandFactory} from './CommandFactory' -import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' -import {FactoryPriority} from './CommandFactory' - -export class CommandRegistry { - private readonly factories: PrioritizedCommandFactory[] = [] - - register(factory: PrioritizedCommandFactory): void { - this.factories.push(factory) - this.factories.sort((a, b) => a.priority - b.priority) - } - - registerWithPriority(factory: CommandFactory, priority: FactoryPriority): void { - const prioritized: PrioritizedCommandFactory = { - priority, - canHandle: (args: ParsedCliArgs) => factory.canHandle(args), - createCommand: (args: ParsedCliArgs) => factory.createCommand(args) - } - this.factories.push(prioritized) - this.factories.sort((a, b) => a.priority - b.priority) - } - - resolve(args: ParsedCliArgs): Command { - for (const factory of this.factories) { - if (factory.priority <= FactoryPriority.Unknown && factory.canHandle(args)) return factory.createCommand(args) - } - - for (const factory of this.factories) { - if (factory.priority === FactoryPriority.Subcommand && factory.canHandle(args)) return factory.createCommand(args) - } - - for (const factory of 
this.factories) { - if (factory.canHandle(args)) return factory.createCommand(args) - } - - throw new Error('No command factory found for the given arguments') - } -} diff --git a/cli/src/commands/DryRunCleanCommand.ts b/cli/src/commands/DryRunCleanCommand.ts deleted file mode 100644 index 90933b96..00000000 --- a/cli/src/commands/DryRunCleanCommand.ts +++ /dev/null @@ -1,57 +0,0 @@ -import type {Command, CommandContext, CommandResult} from './Command' -import * as path from 'node:path' -import {collectAllPluginOutputs, collectDeletionTargets, logProtectedDeletionGuardError} from '@truenine/memory-sync-sdk' -import {runExecutionPreflight} from './execution-preflight' - -export class DryRunCleanCommand implements Command { - readonly name = 'dry-run-clean' - - async execute(ctx: CommandContext): Promise { - const preflightResult = runExecutionPreflight(ctx, this.name) - if (preflightResult != null) return preflightResult - - const {logger, outputPlugins, createCleanContext} = ctx - logger.info('running clean pipeline', {command: 'dry-run-clean', dryRun: true}) - const cleanCtx = createCleanContext(true) - const outputs = await collectAllPluginOutputs(outputPlugins, cleanCtx) - logger.info('collected outputs for cleanup', { - dryRun: true, - projectDirs: outputs.projectDirs.length, - projectFiles: outputs.projectFiles.length, - globalDirs: outputs.globalDirs.length, - globalFiles: outputs.globalFiles.length - }) - - const {filesToDelete, dirsToDelete, emptyDirsToDelete, violations, excludedScanGlobs} = await collectDeletionTargets(outputPlugins, cleanCtx) - const totalDirsToDelete = [...dirsToDelete, ...emptyDirsToDelete] - - if (violations.length > 0) { - logProtectedDeletionGuardError(logger, 'dry-run-cleanup', violations) - return { - success: false, - filesAffected: 0, - dirsAffected: 0, - message: `Protected deletion guard blocked cleanup for ${violations.length} path(s)` - } - } - - for (const file of filesToDelete) logger.info('would delete file', {path: 
path.isAbsolute(file) ? file : path.resolve(file), dryRun: true}) - for (const dir of [...totalDirsToDelete].sort((a, b) => b.length - a.length)) - { logger.info('would delete directory', {path: path.isAbsolute(dir) ? dir : path.resolve(dir), dryRun: true}) } - - logger.info('clean complete', { - dryRun: true, - filesAffected: filesToDelete.length, - dirsAffected: totalDirsToDelete.length, - violations: 0, - excludedScanGlobs - }) - - return { - success: true, - filesAffected: filesToDelete.length, - dirsAffected: totalDirsToDelete.length, - message: 'Dry-run complete, no files were deleted' - } - } -} diff --git a/cli/src/commands/DryRunOutputCommand.ts b/cli/src/commands/DryRunOutputCommand.ts deleted file mode 100644 index d5aeb416..00000000 --- a/cli/src/commands/DryRunOutputCommand.ts +++ /dev/null @@ -1,35 +0,0 @@ -import type {Command, CommandContext, CommandResult} from './Command' -import {collectOutputDeclarations, executeDeclarativeWriteOutputs, syncWindowsConfigIntoWsl} from '@truenine/memory-sync-sdk' -import {runExecutionPreflight} from './execution-preflight' - -export class DryRunOutputCommand implements Command { - readonly name = 'dry-run-output' - - async execute(ctx: CommandContext): Promise { - const preflightResult = runExecutionPreflight(ctx, this.name) - if (preflightResult != null) return preflightResult - - const {logger, outputPlugins, createWriteContext} = ctx - logger.info('started', {command: 'dry-run-output', dryRun: true}) - const writeCtx = createWriteContext(true) - const predeclaredOutputs = await collectOutputDeclarations(outputPlugins, writeCtx) - const results = await executeDeclarativeWriteOutputs(outputPlugins, writeCtx, predeclaredOutputs) - - let totalFiles = 0 - let totalDirs = 0 - for (const [pluginName, result] of results) { - totalFiles += result.files.length - totalDirs += result.dirs.length - logger.info('plugin result', {plugin: pluginName, files: result.files.length, dirs: result.dirs.length, dryRun: true}) - } - - 
const wslMirrorResult = await syncWindowsConfigIntoWsl(outputPlugins, writeCtx, void 0, predeclaredOutputs) - if (wslMirrorResult.errors.length > 0) { - return {success: false, filesAffected: totalFiles, dirsAffected: totalDirs, message: wslMirrorResult.errors.join('\n')} - } - - totalFiles += wslMirrorResult.mirroredFiles - logger.info('complete', {command: 'dry-run-output', totalFiles, totalDirs, dryRun: true}) - return {success: true, filesAffected: totalFiles, dirsAffected: totalDirs, message: 'Dry-run complete, no files were written'} - } -} diff --git a/cli/src/commands/ExecuteCommand.ts b/cli/src/commands/ExecuteCommand.ts deleted file mode 100644 index 9fb046bc..00000000 --- a/cli/src/commands/ExecuteCommand.ts +++ /dev/null @@ -1,108 +0,0 @@ -import type {Command, CommandContext, CommandResult} from './Command' -import {collectOutputDeclarations, executeDeclarativeWriteOutputs, performCleanup, syncWindowsConfigIntoWsl} from '@truenine/memory-sync-sdk' -import {runExecutionPreflight} from './execution-preflight' - -export class ExecuteCommand implements Command { - readonly name = 'execute' - - async execute(ctx: CommandContext): Promise { - const preflightResult = runExecutionPreflight(ctx, this.name) - if (preflightResult != null) return preflightResult - - const {logger, outputPlugins, createCleanContext, createWriteContext, collectedOutputContext} = ctx - logger.info('started', { - command: 'execute', - pluginCount: outputPlugins.length, - projectCount: collectedOutputContext.workspace.projects.length, - workspaceDir: collectedOutputContext.workspace.directory.path - }) - - const writeCtx = createWriteContext(false) - logger.info('execute phase started', {phase: 'collect-output-declarations'}) - const predeclaredOutputs = await collectOutputDeclarations(outputPlugins, writeCtx) - const declarationCount = [...predeclaredOutputs.values()] - .reduce((total, declarations) => total + declarations.length, 0) - logger.info('execute phase complete', { - phase: 
'collect-output-declarations', - pluginCount: predeclaredOutputs.size, - declarationCount - }) - - logger.info('execute phase started', {phase: 'cleanup-before-write'}) - const cleanupResult = await performCleanup(outputPlugins, createCleanContext(false), logger, predeclaredOutputs) - if (cleanupResult.violations.length > 0 || cleanupResult.conflicts.length > 0) { - logger.info('execute halted', { - phase: 'cleanup-before-write', - conflicts: cleanupResult.conflicts.length, - violations: cleanupResult.violations.length, - ...cleanupResult.message != null ? {message: cleanupResult.message} : {} - }) - return {success: false, filesAffected: 0, dirsAffected: 0, ...cleanupResult.message != null ? {message: cleanupResult.message} : {}} - } - - logger.info('execute phase complete', { - phase: 'cleanup-before-write', - deletedFiles: cleanupResult.deletedFiles, - deletedDirs: cleanupResult.deletedDirs - }) - - logger.info('execute phase started', { - phase: 'write-output-files', - declarationCount - }) - const results = await executeDeclarativeWriteOutputs(outputPlugins, writeCtx, predeclaredOutputs) - - let totalFiles = 0 - let totalDirs = 0 - const writeErrors: string[] = [] - for (const result of results.values()) { - totalFiles += result.files.length - totalDirs += result.dirs.length - for (const fileResult of result.files) { - if (!fileResult.success) writeErrors.push(fileResult.error?.message ?? 
`Failed to write ${fileResult.path}`) - } - } - - logger.info('execute phase complete', { - phase: 'write-output-files', - pluginCount: results.size, - filesAffected: totalFiles, - dirsAffected: totalDirs, - writeErrors: writeErrors.length - }) - - if (writeErrors.length > 0) { - logger.info('execute halted', { - phase: 'write-output-files', - writeErrors: writeErrors.length - }) - return {success: false, filesAffected: totalFiles, dirsAffected: totalDirs, message: writeErrors.join('\n')} - } - - logger.info('execute phase started', {phase: 'sync-wsl-mirrors'}) - const wslMirrorResult = await syncWindowsConfigIntoWsl(outputPlugins, writeCtx, void 0, predeclaredOutputs) - if (wslMirrorResult.errors.length > 0) { - logger.info('execute halted', { - phase: 'sync-wsl-mirrors', - mirroredFiles: wslMirrorResult.mirroredFiles, - errors: wslMirrorResult.errors.length - }) - return {success: false, filesAffected: totalFiles, dirsAffected: totalDirs, message: wslMirrorResult.errors.join('\n')} - } - - totalFiles += wslMirrorResult.mirroredFiles - logger.info('execute phase complete', { - phase: 'sync-wsl-mirrors', - mirroredFiles: wslMirrorResult.mirroredFiles, - warnings: wslMirrorResult.warnings.length, - errors: wslMirrorResult.errors.length - }) - logger.info('complete', { - command: 'execute', - pluginCount: results.size, - filesAffected: totalFiles, - dirsAffected: totalDirs - }) - return {success: true, filesAffected: totalFiles, dirsAffected: totalDirs} - } -} diff --git a/cli/src/commands/HelpCommand.ts b/cli/src/commands/HelpCommand.ts deleted file mode 100644 index 3e36de1a..00000000 --- a/cli/src/commands/HelpCommand.ts +++ /dev/null @@ -1,53 +0,0 @@ -import type {Command, CommandContext, CommandResult} from './Command' -import {getCliVersion} from './VersionCommand' - -const CLI_NAME = 'tnmsc' - -const HELP_TEXT = ` -${CLI_NAME} v${getCliVersion()} - Memory Sync CLI - -Synchronize AI memory and configuration files across projects. 
- -USAGE: - ${CLI_NAME} Run the sync pipeline (default) - ${CLI_NAME} help Show this help message - ${CLI_NAME} version Show version information - ${CLI_NAME} dry-run Preview what would be written - ${CLI_NAME} clean Remove all generated files - ${CLI_NAME} clean --dry-run Preview what would be cleaned - -SUBCOMMANDS: - help Show this help message - version Show version information - dry-run Preview changes without writing files - clean Remove all generated output files and directories - -ALIASES: - ${CLI_NAME} --help, ${CLI_NAME} -h Same as '${CLI_NAME} help' - ${CLI_NAME} --version, ${CLI_NAME} -v Same as '${CLI_NAME} version' - ${CLI_NAME} clean -n Same as '${CLI_NAME} clean --dry-run' - -LOG LEVEL OPTIONS: - --trace Most verbose output - --debug Debug information - --info Standard information (default) - --warn Warnings only - --error Errors only - -CLEAN OPTIONS: - -n, --dry-run Preview cleanup without removing files - -CONFIGURATION: - Global user config lives at ~/.aindex/.tnmsc.json. - Edit that file directly, then use plugin.config.ts in your project root - for project-side plugin assembly and runtime overrides. 
-`.trim() - -export class HelpCommand implements Command { - readonly name = 'help' - - async execute(ctx: CommandContext): Promise { - ctx.logger.info(HELP_TEXT) - return {success: true, filesAffected: 0, dirsAffected: 0, message: 'Help displayed'} - } -} diff --git a/cli/src/commands/JsonOutputCommand.ts b/cli/src/commands/JsonOutputCommand.ts deleted file mode 100644 index c4d61637..00000000 --- a/cli/src/commands/JsonOutputCommand.ts +++ /dev/null @@ -1,53 +0,0 @@ -import type {LoggerDiagnosticRecord} from '@truenine/memory-sync-sdk' -import type {Command, CommandContext, CommandResult} from './Command' -import process from 'node:process' -import { - clearBufferedDiagnostics, - drainBufferedDiagnostics, - partitionBufferedDiagnostics -} from '@truenine/memory-sync-sdk' - -interface JsonCommandResult { - readonly success: boolean - readonly filesAffected: number - readonly dirsAffected: number - readonly message?: string - readonly pluginResults: readonly [] - readonly warnings: readonly LoggerDiagnosticRecord[] - readonly errors: readonly LoggerDiagnosticRecord[] -} - -export class JsonOutputCommand implements Command { - readonly name: string - - constructor(private readonly inner: Command) { - this.name = `json:${inner.name}` - } - - async execute(ctx: CommandContext): Promise { - clearBufferedDiagnostics() - const result = await this.inner.execute(ctx) - process.stdout.write( - `${JSON.stringify( - toJsonCommandResult(result, drainBufferedDiagnostics()) - )}\n` - ) - return result - } -} - -export function toJsonCommandResult( - result: CommandResult, - diagnostics = drainBufferedDiagnostics() -): JsonCommandResult { - const {warnings, errors} = partitionBufferedDiagnostics(diagnostics) - return { - success: result.success, - filesAffected: result.filesAffected, - dirsAffected: result.dirsAffected, - ...result.message != null ? 
{message: result.message} : {}, - pluginResults: [], - warnings, - errors - } -} diff --git a/cli/src/commands/PluginsCommand.ts b/cli/src/commands/PluginsCommand.ts deleted file mode 100644 index 5d12cd75..00000000 --- a/cli/src/commands/PluginsCommand.ts +++ /dev/null @@ -1,24 +0,0 @@ -import type {Command, CommandContext, CommandResult, JsonPluginInfo} from './Command' -import process from 'node:process' - -export class PluginsCommand implements Command { - readonly name = 'plugins' - - async execute(ctx: CommandContext): Promise { - const {logger, outputPlugins} = ctx - const pluginInfos: JsonPluginInfo[] = [] - - for (const plugin of outputPlugins) { - pluginInfos.push({ - name: plugin.name, - kind: 'Output', - description: plugin.name, - dependencies: [...plugin.dependsOn ?? []] - }) - } - - process.stdout.write(`${JSON.stringify(pluginInfos)}\n`) - logger.info('plugins listed', {count: pluginInfos.length}) - return {success: true, filesAffected: 0, dirsAffected: 0, message: `Listed ${pluginInfos.length} plugin(s)`} - } -} diff --git a/cli/src/commands/UnknownCommand.ts b/cli/src/commands/UnknownCommand.ts deleted file mode 100644 index c8ec4a05..00000000 --- a/cli/src/commands/UnknownCommand.ts +++ /dev/null @@ -1,23 +0,0 @@ -import type {Command, CommandContext, CommandResult} from './Command' -import {buildUsageDiagnostic, diagnosticLines} from '@truenine/memory-sync-sdk' - -export class UnknownCommand implements Command { - readonly name = 'unknown' - - constructor(private readonly unknownCmd: string) {} - - async execute(ctx: CommandContext): Promise { - ctx.logger.error( - buildUsageDiagnostic({ - code: 'UNKNOWN_COMMAND', - title: `Unknown tnmsc command: ${this.unknownCmd}`, - rootCause: diagnosticLines(`tnmsc does not recognize the "${this.unknownCmd}" subcommand.`), - exactFix: diagnosticLines('Run `tnmsc help` and invoke one of the supported commands.'), - possibleFixes: [diagnosticLines('Check the command spelling and remove unsupported aliases or 
flags.')], - details: {command: this.unknownCmd} - }) - ) - ctx.logger.info('run "tnmsc help" for available commands') - return {success: false, filesAffected: 0, dirsAffected: 0, message: `Unknown command: ${this.unknownCmd}`} - } -} diff --git a/cli/src/commands/VersionCommand.ts b/cli/src/commands/VersionCommand.ts deleted file mode 100644 index c49ab789..00000000 --- a/cli/src/commands/VersionCommand.ts +++ /dev/null @@ -1,16 +0,0 @@ -import type {Command, CommandContext, CommandResult} from './Command' - -const CLI_NAME = 'tnmsc' - -export function getCliVersion(): string { - return typeof __CLI_VERSION__ !== 'undefined' ? __CLI_VERSION__ : 'dev' -} - -export class VersionCommand implements Command { - readonly name = 'version' - - async execute(ctx: CommandContext): Promise { - ctx.logger.info(`${CLI_NAME} v${getCliVersion()}`) - return {success: true, filesAffected: 0, dirsAffected: 0, message: 'Version displayed'} - } -} diff --git a/cli/src/commands/bridge.rs b/cli/src/commands/bridge.rs index da2340b2..34046607 100644 --- a/cli/src/commands/bridge.rs +++ b/cli/src/commands/bridge.rs @@ -1,21 +1,57 @@ use std::process::ExitCode; -pub fn execute() -> ExitCode { - tnmsc::bridge::node::run_node_command("execute", &[]) +fn map_result(result: Result) -> ExitCode { + match result { + Ok(r) if r.success => ExitCode::SUCCESS, + Ok(_) => ExitCode::FAILURE, + Err(e) => { + eprintln!("Error: {}", e); + ExitCode::FAILURE + } + } +} + +pub fn install() -> ExitCode { + map_result(tnmsc::install(tnmsc::MemorySyncCommandOptions::default())) } pub fn dry_run() -> ExitCode { - tnmsc::bridge::node::run_node_command("dry-run", &[]) + map_result(tnmsc::dry_run(tnmsc::MemorySyncCommandOptions::default())) } pub fn clean() -> ExitCode { - tnmsc::bridge::node::run_node_command("clean", &[]) + map_result(tnmsc::clean(tnmsc::MemorySyncCommandOptions::default())) } pub fn dry_run_clean() -> ExitCode { - tnmsc::bridge::node::run_node_command("clean", &["--dry-run"]) + let options = 
tnmsc::MemorySyncCommandOptions { + dry_run: Some(true), + ..Default::default() + }; + map_result(tnmsc::clean(options)) } pub fn plugins() -> ExitCode { - tnmsc::bridge::node::run_node_command("plugins", &[]) + let plugins = tnmsc::list_plugins(); + + println!("# Registered plugins"); + println!(); + + if plugins.is_empty() { + println!("- No plugins are currently registered."); + } else { + for plugin in plugins { + if plugin.dependencies.is_empty() { + println!("- {}", plugin.name); + } else { + println!( + "- {} (depends on: {})", + plugin.name, + plugin.dependencies.join(", ") + ); + } + } + } + + ExitCode::SUCCESS } diff --git a/cli/src/commands/execution-preflight.ts b/cli/src/commands/execution-preflight.ts deleted file mode 100644 index 5e100e9b..00000000 --- a/cli/src/commands/execution-preflight.ts +++ /dev/null @@ -1,128 +0,0 @@ -import type {AindexProjectSeriesName, ExecutionPlanProjectSummary} from '@truenine/memory-sync-sdk' -import type {CommandContext, CommandResult} from './Command' -import {buildDiagnostic, diagnosticLines} from '@truenine/memory-sync-sdk' - -const SERIES_ORDER: readonly AindexProjectSeriesName[] = ['app', 'ext', 'arch', 'softwares'] - -function buildUnsupportedMessage(ctx: CommandContext): string { - return [ - `Unsupported execution directory "${ctx.executionPlan.cwd}".`, - `The directory is inside workspace "${ctx.executionPlan.workspaceDir}" but is not managed by tnmsc.`, - 'Run tnmsc from the workspace root, from a managed project directory, or from outside the workspace.' 
- ].join(' ') -} - -function logExternalProjectGroups(ctx: CommandContext): void { - for (const series of SERIES_ORDER) { - const projects = ctx.executionPlan.projectsBySeries[series] - if (projects.length === 0) continue - ctx.logger.info('external execution project group', { - phase: 'execution-scope', - scope: 'external', - series, - projectCount: projects.length, - projects: projects.map(project => project.name) - }) - } -} - -function logProjectSummary( - ctx: CommandContext, - commandName: string, - project: ExecutionPlanProjectSummary -): void { - ctx.logger.info('execution scope resolved to project', { - phase: 'execution-scope', - command: commandName, - scope: 'project', - cwd: ctx.executionPlan.cwd, - workspaceDir: ctx.executionPlan.workspaceDir, - projectName: project.name, - ...project.series != null ? {projectSeries: project.series} : {} - }) - ctx.logger.info('project-scoped execution only targets the matched project and global outputs', { - phase: 'execution-scope', - command: commandName, - projectName: project.name - }) -} - -export function runExecutionPreflight( - ctx: CommandContext, - commandName: string -): CommandResult | undefined { - switch (ctx.executionPlan.scope) { - case 'workspace': - ctx.logger.warn(buildDiagnostic({ - code: 'EXECUTION_SCOPE_WORKSPACE', - title: 'Execution is limited to workspace-level outputs', - rootCause: diagnosticLines( - `tnmsc resolved the current execution directory "${ctx.executionPlan.cwd}" to the workspace root.`, - 'This run will sync or clean only workspace-level outputs plus global outputs to improve performance.' - ), - exactFix: diagnosticLines( - 'Run tnmsc from a managed project directory to target one project, or from outside the workspace to process every managed project.' 
- ), - details: { - phase: 'execution-scope', - command: commandName, - scope: 'workspace', - cwd: ctx.executionPlan.cwd, - workspaceDir: ctx.executionPlan.workspaceDir - } - })) - return void 0 - case 'project': - logProjectSummary(ctx, commandName, ctx.executionPlan.matchedProject) - return void 0 - case 'external': - ctx.logger.warn(buildDiagnostic({ - code: 'EXECUTION_SCOPE_EXTERNAL', - title: 'Execution will process the full workspace and all managed projects', - rootCause: diagnosticLines( - `tnmsc resolved the current execution directory "${ctx.executionPlan.cwd}" as external to workspace "${ctx.executionPlan.workspaceDir}".`, - 'This run may take longer because it will process workspace-level outputs, all managed projects, and global outputs.' - ), - exactFix: diagnosticLines( - `Run tnmsc from "${ctx.executionPlan.workspaceDir}" for workspace-only execution, or from a managed project directory for project-only execution.` - ), - details: { - phase: 'execution-scope', - command: commandName, - scope: 'external', - cwd: ctx.executionPlan.cwd, - workspaceDir: ctx.executionPlan.workspaceDir - } - })) - logExternalProjectGroups(ctx) - return void 0 - case 'unsupported': { - const message = buildUnsupportedMessage(ctx) - ctx.logger.error(buildDiagnostic({ - code: 'EXECUTION_SCOPE_UNSUPPORTED', - title: 'Execution directory is inside the workspace but not managed by tnmsc', - rootCause: diagnosticLines( - `tnmsc resolved "${ctx.executionPlan.cwd}" inside workspace "${ctx.executionPlan.workspaceDir}", but the directory is not the workspace root and does not belong to any managed project.`, - 'Running from this location is unsupported because tnmsc cannot map the request to a workspace-level or project-level execution target.' - ), - exactFix: diagnosticLines( - 'Run tnmsc from the workspace root, from a managed project directory, or from outside the workspace.' 
- ), - details: { - phase: 'execution-scope', - command: commandName, - scope: 'unsupported', - cwd: ctx.executionPlan.cwd, - workspaceDir: ctx.executionPlan.workspaceDir, - managedProjectCount: ctx.executionPlan.managedProjects.length - } - })) - return { - success: false, - filesAffected: 0, - dirsAffected: 0, - message - } - } - } -} diff --git a/cli/src/commands/execution-routing.test.ts b/cli/src/commands/execution-routing.test.ts deleted file mode 100644 index 05ecd620..00000000 --- a/cli/src/commands/execution-routing.test.ts +++ /dev/null @@ -1,209 +0,0 @@ -import type {ExecutionPlan} from '@truenine/memory-sync-sdk' -import type {CommandContext} from './Command' -import * as path from 'node:path' -import {createLogger, FilePathKind, mergeConfig} from '@truenine/memory-sync-sdk' -import {afterEach, describe, expect, it, vi} from 'vitest' -import {CleanCommand} from './CleanCommand' -import {DryRunCleanCommand} from './DryRunCleanCommand' -import {ExecuteCommand} from './ExecuteCommand' - -function createEmptyProjectsBySeries() { - return { - app: [], - ext: [], - arch: [], - softwares: [] - } -} - -const { - collectAllPluginOutputsMock, - collectDeletionTargetsMock, - collectOutputDeclarationsMock, - executeDeclarativeWriteOutputsMock, - performCleanupMock, - syncWindowsConfigIntoWslMock -} = vi.hoisted(() => ({ - collectAllPluginOutputsMock: vi.fn(), - collectDeletionTargetsMock: vi.fn(), - collectOutputDeclarationsMock: vi.fn(), - executeDeclarativeWriteOutputsMock: vi.fn(), - performCleanupMock: vi.fn(), - syncWindowsConfigIntoWslMock: vi.fn() -})) - -vi.mock('@truenine/memory-sync-sdk', async importOriginal => { - const actual = await importOriginal() - - return { - ...actual, - collectAllPluginOutputs: collectAllPluginOutputsMock, - collectDeletionTargets: collectDeletionTargetsMock, - collectOutputDeclarations: collectOutputDeclarationsMock, - executeDeclarativeWriteOutputs: executeDeclarativeWriteOutputsMock, - performCleanup: performCleanupMock, - 
syncWindowsConfigIntoWsl: syncWindowsConfigIntoWslMock - } -}) - -function createBaseContext(executionPlan: ExecutionPlan): { - readonly ctx: CommandContext - readonly infoSpy: ReturnType - readonly warnSpy: ReturnType - readonly errorSpy: ReturnType -} { - const workspaceDir = executionPlan.workspaceDir - const logger = createLogger('execution-routing-test', 'debug') - const infoSpy = vi.spyOn(logger, 'info') - const warnSpy = vi.spyOn(logger, 'warn') - const errorSpy = vi.spyOn(logger, 'error') - - const collectedOutputContext = { - workspace: { - directory: { - pathKind: FilePathKind.Absolute, - path: workspaceDir, - getDirectoryName: () => path.basename(workspaceDir) - }, - projects: [] - } - } - - const createCleanContext = vi.fn((dryRun: boolean) => ({ - logger, - collectedOutputContext, - pluginOptions: mergeConfig({workspaceDir}), - runtimeTargets: {jetbrainsCodexDirs: []}, - executionPlan, - dryRun - })) - const createWriteContext = vi.fn((dryRun: boolean) => ({ - logger, - collectedOutputContext, - pluginOptions: mergeConfig({workspaceDir}), - runtimeTargets: {jetbrainsCodexDirs: []}, - executionPlan, - dryRun, - registeredPluginNames: [] - })) - - return { - ctx: { - logger, - outputPlugins: [], - collectedOutputContext, - userConfigOptions: mergeConfig({workspaceDir}), - executionPlan, - createCleanContext, - createWriteContext - } as unknown as CommandContext, - infoSpy, - warnSpy, - errorSpy - } -} - -afterEach(() => { - vi.clearAllMocks() -}) - -describe('execution-aware command routing', () => { - it('short-circuits execute when cwd is unsupported inside workspace', async () => { - const workspaceDir = path.resolve('/tmp/tnmsc-execute-unsupported') - const {ctx} = createBaseContext({ - scope: 'unsupported', - cwd: path.join(workspaceDir, 'scripts'), - workspaceDir, - projectsBySeries: createEmptyProjectsBySeries(), - managedProjects: [] - }) - - const result = await new ExecuteCommand().execute(ctx) - - expect(result.success).toBe(false) - 
expect(result.message).toContain('not managed by tnmsc') - expect(collectOutputDeclarationsMock).not.toHaveBeenCalled() - expect(performCleanupMock).not.toHaveBeenCalled() - expect(executeDeclarativeWriteOutputsMock).not.toHaveBeenCalled() - }) - - it('logs project scope details before running clean', async () => { - const workspaceDir = path.resolve('/tmp/tnmsc-clean-project') - const {ctx, infoSpy} = createBaseContext({ - scope: 'project', - cwd: path.join(workspaceDir, 'plugin-one', 'docs'), - workspaceDir, - projectsBySeries: { - ...createEmptyProjectsBySeries(), - ext: [{ - name: 'plugin-one', - rootDir: path.join(workspaceDir, 'plugin-one'), - series: 'ext' - }] - }, - matchedProject: { - name: 'plugin-one', - rootDir: path.join(workspaceDir, 'plugin-one'), - series: 'ext' - } - }) - performCleanupMock.mockResolvedValue({ - deletedFiles: 0, - deletedDirs: 0, - errors: [], - violations: [], - conflicts: [] - }) - - const result = await new CleanCommand().execute(ctx) - - expect(result.success).toBe(true) - expect(performCleanupMock).toHaveBeenCalledTimes(1) - expect(infoSpy.mock.calls).toEqual(expect.arrayContaining([ - ['execution scope resolved to project', expect.objectContaining({projectName: 'plugin-one', projectSeries: 'ext'})] - ])) - }) - - it('logs external project groups before running dry-run clean', async () => { - const workspaceDir = path.resolve('/tmp/tnmsc-dry-run-clean-external') - const {ctx, infoSpy, warnSpy} = createBaseContext({ - scope: 'external', - cwd: path.resolve('/tmp/outside-workspace'), - workspaceDir, - projectsBySeries: { - app: [{name: 'app-one', rootDir: path.join(workspaceDir, 'app-one'), series: 'app'}], - ext: [{name: 'plugin-one', rootDir: path.join(workspaceDir, 'plugin-one'), series: 'ext'}], - arch: [], - softwares: [{name: 'tool-one', rootDir: path.join(workspaceDir, 'tool-one'), series: 'softwares'}] - } - }) - collectAllPluginOutputsMock.mockResolvedValue({ - projectDirs: [], - projectFiles: [], - globalDirs: [], - 
globalFiles: [] - }) - collectDeletionTargetsMock.mockResolvedValue({ - filesToDelete: [], - dirsToDelete: [], - emptyDirsToDelete: [], - violations: [], - conflicts: [], - excludedScanGlobs: [] - }) - - const result = await new DryRunCleanCommand().execute(ctx) - - expect(result.success).toBe(true) - expect(collectAllPluginOutputsMock).toHaveBeenCalledTimes(1) - expect(collectDeletionTargetsMock).toHaveBeenCalledTimes(1) - expect(warnSpy.mock.calls).toEqual(expect.arrayContaining([ - [expect.objectContaining({code: 'EXECUTION_SCOPE_EXTERNAL', title: 'Execution will process the full workspace and all managed projects'})] - ])) - expect(infoSpy.mock.calls).toEqual(expect.arrayContaining([ - ['external execution project group', expect.objectContaining({series: 'app', projects: ['app-one']})], - ['external execution project group', expect.objectContaining({series: 'ext', projects: ['plugin-one']})], - ['external execution project group', expect.objectContaining({series: 'softwares', projects: ['tool-one']})] - ])) - }) -}) diff --git a/cli/src/commands/factories/CleanCommandFactory.ts b/cli/src/commands/factories/CleanCommandFactory.ts deleted file mode 100644 index 3e92a178..00000000 --- a/cli/src/commands/factories/CleanCommandFactory.ts +++ /dev/null @@ -1,15 +0,0 @@ -import type {Command} from '../Command' -import type {CommandFactory} from '../CommandFactory' -import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' -import {CleanCommand} from '../CleanCommand' -import {DryRunCleanCommand} from '../DryRunCleanCommand' - -export class CleanCommandFactory implements CommandFactory { - canHandle(args: ParsedCliArgs): boolean { - return args.subcommand === 'clean' - } - - createCommand(args: ParsedCliArgs): Command { - return args.dryRun ? 
new DryRunCleanCommand() : new CleanCommand() - } -} diff --git a/cli/src/commands/factories/DryRunCommandFactory.ts b/cli/src/commands/factories/DryRunCommandFactory.ts deleted file mode 100644 index cefc3b6f..00000000 --- a/cli/src/commands/factories/DryRunCommandFactory.ts +++ /dev/null @@ -1,14 +0,0 @@ -import type {Command} from '../Command' -import type {CommandFactory} from '../CommandFactory' -import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' -import {DryRunOutputCommand} from '../DryRunOutputCommand' - -export class DryRunCommandFactory implements CommandFactory { - canHandle(args: ParsedCliArgs): boolean { - return args.subcommand === 'dry-run' - } - - createCommand(): Command { - return new DryRunOutputCommand() - } -} diff --git a/cli/src/commands/factories/ExecuteCommandFactory.ts b/cli/src/commands/factories/ExecuteCommandFactory.ts deleted file mode 100644 index 681b3447..00000000 --- a/cli/src/commands/factories/ExecuteCommandFactory.ts +++ /dev/null @@ -1,13 +0,0 @@ -import type {Command} from '../Command' -import type {CommandFactory} from '../CommandFactory' -import {ExecuteCommand} from '../ExecuteCommand' - -export class ExecuteCommandFactory implements CommandFactory { - canHandle(): boolean { - return true - } - - createCommand(): Command { - return new ExecuteCommand() - } -} diff --git a/cli/src/commands/factories/HelpCommandFactory.ts b/cli/src/commands/factories/HelpCommandFactory.ts deleted file mode 100644 index 7db10b96..00000000 --- a/cli/src/commands/factories/HelpCommandFactory.ts +++ /dev/null @@ -1,17 +0,0 @@ -import type {Command} from '../Command' -import type {PrioritizedCommandFactory} from '../CommandFactory' -import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' -import {FactoryPriority} from '../CommandFactory' -import {HelpCommand} from '../HelpCommand' - -export class HelpCommandFactory implements PrioritizedCommandFactory { - readonly priority = FactoryPriority.Flags - - canHandle(args: 
ParsedCliArgs): boolean { - return args.helpFlag || args.subcommand === 'help' - } - - createCommand(): Command { - return new HelpCommand() - } -} diff --git a/cli/src/commands/factories/PluginsCommandFactory.ts b/cli/src/commands/factories/PluginsCommandFactory.ts deleted file mode 100644 index 2d3f87d3..00000000 --- a/cli/src/commands/factories/PluginsCommandFactory.ts +++ /dev/null @@ -1,14 +0,0 @@ -import type {Command} from '../Command' -import type {CommandFactory} from '../CommandFactory' -import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' -import {PluginsCommand} from '../PluginsCommand' - -export class PluginsCommandFactory implements CommandFactory { - canHandle(args: ParsedCliArgs): boolean { - return args.subcommand === 'plugins' - } - - createCommand(): Command { - return new PluginsCommand() - } -} diff --git a/cli/src/commands/factories/UnknownCommandFactory.ts b/cli/src/commands/factories/UnknownCommandFactory.ts deleted file mode 100644 index bea8f387..00000000 --- a/cli/src/commands/factories/UnknownCommandFactory.ts +++ /dev/null @@ -1,17 +0,0 @@ -import type {Command} from '../Command' -import type {PrioritizedCommandFactory} from '../CommandFactory' -import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' -import {FactoryPriority} from '../CommandFactory' -import {UnknownCommand} from '../UnknownCommand' - -export class UnknownCommandFactory implements PrioritizedCommandFactory { - readonly priority = FactoryPriority.Unknown - - canHandle(args: ParsedCliArgs): boolean { - return args.unknownCommand != null - } - - createCommand(args: ParsedCliArgs): Command { - return new UnknownCommand(args.unknownCommand ?? 
'') - } -} diff --git a/cli/src/commands/factories/VersionCommandFactory.ts b/cli/src/commands/factories/VersionCommandFactory.ts deleted file mode 100644 index f0deb6d1..00000000 --- a/cli/src/commands/factories/VersionCommandFactory.ts +++ /dev/null @@ -1,17 +0,0 @@ -import type {Command} from '../Command' -import type {PrioritizedCommandFactory} from '../CommandFactory' -import type {ParsedCliArgs} from '@/pipeline/CliArgumentParser' -import {FactoryPriority} from '../CommandFactory' -import {VersionCommand} from '../VersionCommand' - -export class VersionCommandFactory implements PrioritizedCommandFactory { - readonly priority = FactoryPriority.Flags - - canHandle(args: ParsedCliArgs): boolean { - return args.versionFlag || args.subcommand === 'version' - } - - createCommand(): Command { - return new VersionCommand() - } -} diff --git a/cli/src/commands/help.rs b/cli/src/commands/help.rs index e22f4143..10a55392 100644 --- a/cli/src/commands/help.rs +++ b/cli/src/commands/help.rs @@ -1,28 +1,29 @@ use std::process::ExitCode; pub fn execute() -> ExitCode { - println!("tnmsc — Memory Sync CLI"); - println!(); - println!("USAGE:"); - println!(" tnmsc [OPTIONS] [COMMAND]"); - println!(); - println!("COMMANDS:"); - println!(" (default) Sync AI memory and configuration files"); - println!(" dry-run Preview changes without writing files"); - println!(" clean Remove all generated output files"); - println!(" plugins List all registered plugins"); - println!(" version Show version information"); - println!(" help Show this help message"); - println!(); - println!("OPTIONS:"); - println!(" --trace Set log level to trace"); - println!(" --debug Set log level to debug"); - println!(" --info Set log level to info"); - println!(" --warn Set log level to warn"); - println!(" --error Set log level to error"); - println!(); - println!("CONFIGURATION:"); - println!(" Global user config: ~/.aindex/.tnmsc.json"); - println!(" Project runtime assembly: plugin.config.ts"); - 
ExitCode::SUCCESS + println!("tnmsc — Memory Sync CLI"); + println!(); + println!("USAGE:"); + println!(" tnmsc [OPTIONS] [COMMAND]"); + println!(); + println!("COMMANDS:"); + println!(" (default) Run the default install pipeline"); + println!(" install Run the install pipeline explicitly"); + println!(" dry-run Preview changes without writing files"); + println!(" clean Remove all generated output files"); + println!(" plugins List all registered plugins"); + println!(" version Show version information"); + println!(" help Show this help message"); + println!(); + println!("OPTIONS:"); + println!(" --trace Set log level to trace"); + println!(" --debug Set log level to debug"); + println!(" --info Set log level to info"); + println!(" --warn Set log level to warn"); + println!(" --error Set log level to error"); + println!(); + println!("CONFIGURATION:"); + println!(" Global user config: ~/.aindex/.tnmsc.json"); + println!(" Project runtime assembly: plugin.config.ts"); + ExitCode::SUCCESS } diff --git a/cli/src/commands/version.rs b/cli/src/commands/version.rs index b0cf860b..f8e4acf3 100644 --- a/cli/src/commands/version.rs +++ b/cli/src/commands/version.rs @@ -1,6 +1,6 @@ use std::process::ExitCode; pub fn execute() -> ExitCode { - println!("{}", tnmsc::version()); - ExitCode::SUCCESS + println!("{}", tnmsc::version()); + ExitCode::SUCCESS } diff --git a/cli/src/globals.ts b/cli/src/globals.ts deleted file mode 100644 index 7b218b19..00000000 --- a/cli/src/globals.ts +++ /dev/null @@ -1 +0,0 @@ -export * from '@truenine/memory-sync-sdk/globals' diff --git a/cli/src/index.test.ts b/cli/src/index.test.ts index 9966cb7d..239b0377 100644 --- a/cli/src/index.test.ts +++ b/cli/src/index.test.ts @@ -1,12 +1,9 @@ -import {listPrompts} from '@truenine/memory-sync-sdk' - import {describe, expect, it} from 'vitest' import * as cliShell from './index' describe('cli shell entrypoint', () => { - it('re-exports sdk library APIs while keeping local shell exports', async () => 
{ + it('keeps the shell entrypoint focused on cli exports', async () => { expect(typeof cliShell.runCli).toBe('function') - expect(typeof cliShell.createDefaultPluginConfig).toBe('function') - expect(cliShell.listPrompts).toBe(listPrompts) + expect(typeof cliShell.getCliVersion).toBe('function') }) }) diff --git a/cli/src/index.ts b/cli/src/index.ts index a99ce905..cee99dd4 100644 --- a/cli/src/index.ts +++ b/cli/src/index.ts @@ -1,17 +1,15 @@ #!/usr/bin/env node -import {existsSync, realpathSync} from 'node:fs' +import {realpathSync} from 'node:fs' import process from 'node:process' import {fileURLToPath} from 'node:url' import {runCli} from './cli-runtime' export * from './cli-runtime' -export * from './plugin.config' -export * from '@truenine/memory-sync-sdk' function isCliEntrypoint(argv: readonly string[] = process.argv): boolean { const entryPath = argv[1] - if (entryPath == null || entryPath.length === 0 || !existsSync(entryPath)) return false + if (entryPath == null || entryPath.length === 0) return false try { return realpathSync(entryPath) === realpathSync(fileURLToPath(import.meta.url)) diff --git a/cli/src/main.rs b/cli/src/main.rs index 5e1199de..7ceccdf6 100644 --- a/cli/src/main.rs +++ b/cli/src/main.rs @@ -1,7 +1,7 @@ //! tnmsc — Rust CLI shell entry point. //! -//! Pure Rust commands: help, version -//! Bridge commands (Node.js): execute, dry-run, clean, plugins +//! Pure Rust commands: help, version, plugins +//! 
Facade commands: install, dry-run, clean mod cli; mod commands; @@ -14,24 +14,24 @@ use tnmsc_logger::{flush_output, set_global_log_level}; use cli::{Cli, ResolvedCommand, resolve_command, resolve_log_level}; fn main() -> ExitCode { - let cli = Cli::parse(); - - if let Some(level) = resolve_log_level(&cli) { - set_global_log_level(level.to_logger_level()); - } - - let command = resolve_command(&cli); - - let exit_code = match command { - ResolvedCommand::Help => commands::help::execute(), - ResolvedCommand::Version => commands::version::execute(), - ResolvedCommand::Execute => commands::bridge::execute(), - ResolvedCommand::DryRun => commands::bridge::dry_run(), - ResolvedCommand::Clean => commands::bridge::clean(), - ResolvedCommand::DryRunClean => commands::bridge::dry_run_clean(), - ResolvedCommand::Plugins => commands::bridge::plugins(), - }; - - flush_output(); - exit_code + let cli = Cli::parse(); + + if let Some(level) = resolve_log_level(&cli) { + set_global_log_level(level.to_logger_level()); + } + + let command = resolve_command(&cli); + + let exit_code = match command { + ResolvedCommand::Help => commands::help::execute(), + ResolvedCommand::Version => commands::version::execute(), + ResolvedCommand::Install => commands::bridge::install(), + ResolvedCommand::DryRun => commands::bridge::dry_run(), + ResolvedCommand::Clean => commands::bridge::clean(), + ResolvedCommand::DryRunClean => commands::bridge::dry_run_clean(), + ResolvedCommand::Plugins => commands::bridge::plugins(), + }; + + flush_output(); + exit_code } diff --git a/cli/src/pipeline/CliArgumentParser.test.ts b/cli/src/pipeline/CliArgumentParser.test.ts deleted file mode 100644 index eff6fc48..00000000 --- a/cli/src/pipeline/CliArgumentParser.test.ts +++ /dev/null @@ -1,9 +0,0 @@ -import {describe, expect, it} from 'vitest' -import {parseArgs, resolveCommand} from './CliArgumentParser' - -describe('cli argument parser', () => { - it('resolves the dry-run subcommand to DryRunOutputCommand', () 
=> { - const command = resolveCommand(parseArgs(['dry-run'])) - expect(command.name).toBe('dry-run-output') - }) -}) diff --git a/cli/src/plugin-runtime.ts b/cli/src/plugin-runtime.ts deleted file mode 100644 index 6243532b..00000000 --- a/cli/src/plugin-runtime.ts +++ /dev/null @@ -1,165 +0,0 @@ -import type { - OutputCleanContext, - OutputWriteContext, - RuntimeCommand -} from '@truenine/memory-sync-sdk' -import type {Command, CommandContext} from '@/commands/Command' -import process from 'node:process' -import { - buildUnhandledExceptionDiagnostic, - createLogger, - discoverOutputRuntimeTargets, - drainBufferedDiagnostics, - flushOutput, - setGlobalLogLevel -} from '@truenine/memory-sync-sdk' -import {CleanCommand} from '@/commands/CleanCommand' -import {DryRunCleanCommand} from '@/commands/DryRunCleanCommand' -import {DryRunOutputCommand} from '@/commands/DryRunOutputCommand' -import {ExecuteCommand} from '@/commands/ExecuteCommand' -import {JsonOutputCommand, toJsonCommandResult} from '@/commands/JsonOutputCommand' -import {PluginsCommand} from '@/commands/PluginsCommand' -import {createDefaultPluginConfig} from './plugin.config' - -const INTERNAL_BRIDGE_JSON_FLAG = '--bridge-json' - -function parseRuntimeArgs(argv: string[]): { - subcommand: RuntimeCommand - bridgeJson: boolean - dryRun: boolean -} { - const args = argv.slice(2) - let subcommand: RuntimeCommand = 'execute' - let bridgeJson = false - let dryRun = false - for (const arg of args) { - if (arg === INTERNAL_BRIDGE_JSON_FLAG) bridgeJson = true - else if (arg === '--dry-run' || arg === '-n') dryRun = true - else if (!arg.startsWith('-')) { - subcommand - = arg === 'plugins' || arg === 'clean' || arg === 'dry-run' - ? 
arg - : 'execute' - } - } - return {subcommand, bridgeJson, dryRun} -} - -function resolveRuntimeCommand( - subcommand: RuntimeCommand, - dryRun: boolean -): Command { - switch (subcommand) { - case 'execute': - return new ExecuteCommand() - case 'dry-run': - return new DryRunOutputCommand() - case 'clean': - return dryRun ? new DryRunCleanCommand() : new CleanCommand() - case 'plugins': - return new PluginsCommand() - } -} - -function flushAndExit(code: number): never { - flushOutput() - process.exit(code) -} - -function writeBridgeJsonFailure(error: unknown): void { - const logger = createLogger('plugin-runtime', 'silent') - logger.error(buildUnhandledExceptionDiagnostic('plugin-runtime', error)) - process.stdout.write( - `${JSON.stringify( - toJsonCommandResult( - { - success: false, - filesAffected: 0, - dirsAffected: 0, - message: error instanceof Error ? error.message : String(error) - }, - drainBufferedDiagnostics() - ) - )}\n` - ) -} - -async function main(): Promise { - const {subcommand, bridgeJson, dryRun} = parseRuntimeArgs(process.argv) - if (bridgeJson) setGlobalLogLevel('silent') - const logger = createLogger('PluginRuntime') - - logger.info('runtime bootstrap started', {subcommand, bridgeJson, dryRun}) - - const userPluginConfig = await createDefaultPluginConfig( - process.argv, - subcommand, - process.cwd() - ) - let command = resolveRuntimeCommand(subcommand, dryRun) - if (bridgeJson && command.name !== 'plugins') { - command = new JsonOutputCommand(command) - } - - const {context, outputPlugins, userConfigOptions, executionPlan} - = userPluginConfig - logger.info('runtime configuration resolved', { - command: command.name, - pluginCount: outputPlugins.length, - projectCount: context.workspace.projects.length, - workspaceDir: context.workspace.directory.path, - ...context.aindexDir != null ? 
{aindexDir: context.aindexDir} : {} - }) - const runtimeTargets = discoverOutputRuntimeTargets(logger) - logger.info('runtime targets discovered', { - command: command.name, - jetbrainsCodexDirs: runtimeTargets.jetbrainsCodexDirs.length - }) - const createCleanContext = (dry: boolean): OutputCleanContext => ({ - logger, - collectedOutputContext: context, - pluginOptions: userConfigOptions, - runtimeTargets, - executionPlan, - dryRun: dry - }) - const createWriteContext = (dry: boolean): OutputWriteContext => ({ - logger, - collectedOutputContext: context, - pluginOptions: userConfigOptions, - runtimeTargets, - executionPlan, - dryRun: dry, - registeredPluginNames: Array.from(outputPlugins, plugin => plugin.name) - }) - const commandCtx: CommandContext = { - logger, - outputPlugins: [...outputPlugins], - collectedOutputContext: context, - userConfigOptions, - executionPlan, - createCleanContext, - createWriteContext - } - logger.info('command dispatch started', {command: command.name}) - const result = await command.execute(commandCtx) - logger.info('command dispatch complete', { - command: command.name, - success: result.success, - filesAffected: result.filesAffected, - dirsAffected: result.dirsAffected, - ...result.message != null ? 
{message: result.message} : {} - }) - if (!result.success) flushAndExit(1) - flushOutput() -} - -main().catch(error => { - if (parseRuntimeArgs(process.argv).bridgeJson) { - writeBridgeJsonFailure(error) - flushAndExit(1) - } - const logger = createLogger('plugin-runtime', 'error') - logger.error(buildUnhandledExceptionDiagnostic('plugin-runtime', error)) - flushAndExit(1) -}) diff --git a/cli/src/plugin.config.ts b/cli/src/plugin.config.ts deleted file mode 100644 index 47740236..00000000 --- a/cli/src/plugin.config.ts +++ /dev/null @@ -1,74 +0,0 @@ -import type {PipelineConfig, RuntimeCommand} from '@truenine/memory-sync-sdk' -import process from 'node:process' -import { - AgentsOutputPlugin, - ClaudeCodeCLIOutputPlugin, - CodexCLIOutputPlugin, - CursorOutputPlugin, - defineConfig, - DroidCLIOutputPlugin, - GeminiCLIOutputPlugin, - GitExcludeOutputPlugin, - JetBrainsAIAssistantCodexOutputPlugin, - JetBrainsIDECodeStyleConfigOutputPlugin, - KiroCLIOutputPlugin, - OpencodeCLIOutputPlugin, - QoderIDEPluginOutputPlugin, - ReadmeMdConfigFileOutputPlugin, - TraeCNIDEOutputPlugin, - TraeIDEOutputPlugin, - VisualStudioCodeIDEConfigOutputPlugin, - WarpIDEOutputPlugin, - WindsurfOutputPlugin, - ZedIDEConfigOutputPlugin -} from '@truenine/memory-sync-sdk' - -type DefineConfigWithOutputPlugins = Parameters[0] & { - readonly outputPlugins: PipelineConfig['outputPlugins'] -} - -export function resolveRuntimeCommandFromArgv(argv: readonly string[] = process.argv): RuntimeCommand { - const args = argv.filter((arg): arg is string => arg != null) - const userArgs = args.slice(2) - const subcommand = userArgs.find(arg => !arg.startsWith('-')) - if (subcommand === 'plugins') return 'plugins' - if (subcommand === 'clean') return 'clean' - if (subcommand === 'dry-run' || userArgs.includes('--dry-run') || userArgs.includes('-n')) return 'dry-run' - return 'execute' -} - -export async function createDefaultPluginConfig( - argv: readonly string[] = process.argv, - runtimeCommand: 
RuntimeCommand = resolveRuntimeCommandFromArgv(argv), - executionCwd: string = process.cwd() -): Promise { - const outputPlugins: PipelineConfig['outputPlugins'] = [ - new AgentsOutputPlugin(), - new ClaudeCodeCLIOutputPlugin(), - new CodexCLIOutputPlugin(), - new JetBrainsAIAssistantCodexOutputPlugin(), - new DroidCLIOutputPlugin(), - new GeminiCLIOutputPlugin(), - new KiroCLIOutputPlugin(), - new OpencodeCLIOutputPlugin(), - new QoderIDEPluginOutputPlugin(), - new TraeIDEOutputPlugin(), - new TraeCNIDEOutputPlugin(), - new WarpIDEOutputPlugin(), - new WindsurfOutputPlugin(), - new CursorOutputPlugin(), - new GitExcludeOutputPlugin(), - new JetBrainsIDECodeStyleConfigOutputPlugin(), - new VisualStudioCodeIDEConfigOutputPlugin(), - new ZedIDEConfigOutputPlugin(), - new ReadmeMdConfigFileOutputPlugin() - ] - - return defineConfig({ - executionCwd, - runtimeCommand, - outputPlugins - } as DefineConfigWithOutputPlugins) -} - -export default createDefaultPluginConfig diff --git a/cli/src/tsdown-config.test.ts b/cli/src/tsdown-config.test.ts index cb469e6e..1dc3981c 100644 --- a/cli/src/tsdown-config.test.ts +++ b/cli/src/tsdown-config.test.ts @@ -8,6 +8,7 @@ interface TsdownEntryConfig { readonly alias?: Record readonly deps?: { readonly alwaysBundle?: readonly string[] + readonly neverBundle?: readonly string[] } } @@ -18,7 +19,8 @@ function includesEntry(config: TsdownEntryConfig, targetEntry: string): boolean describe('cli tsdown config', () => { it('lets TypeScript resolve the script runtime package through workspace metadata', () => { - expect(tsconfig.compilerOptions.paths['@truenine/script-runtime']).toBeUndefined() + const paths = tsconfig.compilerOptions.paths as Record + expect(paths['@truenine/script-runtime']).toBeUndefined() }) it('bundles the worker against the built script runtime module', () => { @@ -29,8 +31,9 @@ describe('cli tsdown config', () => { resolve('../libraries/script-runtime/dist/index.mjs') ) 
expect(workerConfig?.deps?.alwaysBundle).toEqual(expect.arrayContaining([ - '@truenine/script-runtime', - 'jiti' + '@truenine/memory-sync-sdk', + '@truenine/script-runtime' ])) + expect(workerConfig?.deps?.neverBundle).toEqual(expect.arrayContaining(['jiti'])) }) }) diff --git a/cli/tsconfig.json b/cli/tsconfig.json index 3a30817c..eaf2d6cd 100644 --- a/cli/tsconfig.json +++ b/cli/tsconfig.json @@ -13,7 +13,7 @@ "paths": { "@sdk": ["../sdk/src/index.ts"], "@sdk/*": ["../sdk/src/*"], - "@/*": ["./src/*"], + "@/*": ["./src/*", "../sdk/src/*"], "@truenine/desk-paths": ["./src/core/desk-paths.ts"], "@truenine/desk-paths/*": ["./src/core/desk-paths/*"], "@truenine/plugin-output-shared": ["./src/plugins/plugin-output-shared/index.ts"], diff --git a/cli/tsdown.config.ts b/cli/tsdown.config.ts index 2c7684ad..7bc21152 100644 --- a/cli/tsdown.config.ts +++ b/cli/tsdown.config.ts @@ -2,7 +2,8 @@ import {resolve} from 'node:path' import {defineConfig} from 'tsdown' const alwaysBundleDeps = ['@truenine/memory-sync-sdk'] -const scriptRuntimeWorkerBundleDeps = [...alwaysBundleDeps, '@truenine/script-runtime', 'jiti'] +const scriptRuntimeWorkerBundleDeps = [...alwaysBundleDeps, '@truenine/script-runtime'] +const neverBundleDeps = ['jiti'] export default defineConfig([ { @@ -10,43 +11,23 @@ export default defineConfig([ platform: 'node', sourcemap: false, unbundle: false, - deps: { - alwaysBundle: alwaysBundleDeps, - onlyBundle: false - }, format: ['esm'], minify: true, dts: {sourcemap: false}, - outputOptions: {exports: 'named'} - }, - { - entry: ['./src/globals.ts'], - platform: 'node', - sourcemap: false, - deps: { - alwaysBundle: alwaysBundleDeps - }, - format: ['esm'], - minify: false, - dts: {sourcemap: false} - }, - { - entry: ['./src/plugin-runtime.ts'], - platform: 'node', - sourcemap: false, deps: { - alwaysBundle: alwaysBundleDeps + alwaysBundle: alwaysBundleDeps, + onlyBundle: false, + neverBundle: neverBundleDeps }, - format: ['esm'], - minify: true, - dts: false + 
outputOptions: {exports: 'named'} }, { entry: ['./src/script-runtime-worker.ts'], platform: 'node', sourcemap: false, deps: { - alwaysBundle: scriptRuntimeWorkerBundleDeps + alwaysBundle: scriptRuntimeWorkerBundleDeps, + neverBundle: neverBundleDeps }, alias: { '@truenine/script-runtime': resolve('../libraries/script-runtime/dist/index.mjs') diff --git a/cli/vite.config.ts b/cli/vite.config.ts index 6d2f1c26..58bcf392 100644 --- a/cli/vite.config.ts +++ b/cli/vite.config.ts @@ -1,10 +1,42 @@ +import * as fs from 'node:fs' +import * as path from 'node:path' import {fileURLToPath} from 'node:url' import {defineConfig} from 'vite' +const __dirname = path.dirname(fileURLToPath(import.meta.url)) + +function tryResolve(baseDir: string, relPath: string): string | undefined { + const candidate = path.resolve(baseDir, relPath) + if (fs.existsSync(candidate) && fs.statSync(candidate).isFile()) { + return candidate + } + const exts = ['.ts', '.tsx', '.js', '.jsx'] + for (const ext of exts) { + if (fs.existsSync(candidate + ext)) return candidate + ext + } + if (!(fs.existsSync(candidate) && fs.statSync(candidate).isDirectory())) return void 0 + + const indexExts = ['index.ts', 'index.tsx', 'index.js', 'index.jsx'] + for (const ext of indexExts) { + const indexPath = path.join(candidate, ext) + if (fs.existsSync(indexPath)) return indexPath + } + return void 0 +} + export default defineConfig({ - resolve: { - alias: { - '@': fileURLToPath(new URL('./src', import.meta.url)) + plugins: [ + { + name: 'resolve-sdk-aliases', + enforce: 'pre', + resolveId(id) { + if (!id.startsWith('@/')) return void 0 + const rel = id.slice(2) + const local = tryResolve(path.resolve(__dirname, 'src'), rel) + if (local != null) return local + const sdk = tryResolve(path.resolve(__dirname, '../sdk/src'), rel) + if (sdk != null) return sdk + } } - } + ] }) diff --git a/doc/content/cli/_meta.ts b/doc/content/cli/_meta.ts index cf92a996..0184f08e 100644 --- a/doc/content/cli/_meta.ts +++ 
b/doc/content/cli/_meta.ts @@ -2,7 +2,7 @@ export default { 'index': '概览', 'install': '安装与要求', 'workspace-setup': '工作区与 aindex', - 'first-sync': '第一次同步', + 'first-sync': '第一次安装', 'migration': '从旧文档迁移', 'cli-commands': 'CLI 命令', 'dry-run-and-clean': 'dry-run 与 clean', diff --git a/doc/content/cli/cli-commands.mdx b/doc/content/cli/cli-commands.mdx index 7def5abb..6ca0d8e2 100644 --- a/doc/content/cli/cli-commands.mdx +++ b/doc/content/cli/cli-commands.mdx @@ -11,9 +11,10 @@ status: stable | 命令 | 说明 | | --- | --- | -| `tnmsc` | 运行默认同步流水线 | +| `tnmsc` | 运行默认 install 流水线 | | `tnmsc help` | 显示帮助 | | `tnmsc version` | 显示版本 | +| `tnmsc install` | 显式运行 install 流水线 | | `tnmsc dry-run` | 预览将要写入的文件 | | `tnmsc clean` | 删除生成输出,并继续清理项目源码树中的空目录 | | `tnmsc clean --dry-run` | 预览将被清理的内容,包括后续会一并移除的空目录 | diff --git a/doc/content/cli/first-sync.mdx b/doc/content/cli/first-sync.mdx index a20d1897..2632b475 100644 --- a/doc/content/cli/first-sync.mdx +++ b/doc/content/cli/first-sync.mdx @@ -1,24 +1,24 @@ --- -title: 第一次同步 -description: 用最短路径走完 tnmsc help、dry-run、真实同步运行和结果核验。 -sidebarTitle: 第一次同步 +title: 第一次安装流程 +description: 用最短路径走完 tnmsc help、dry-run、tnmsc install 和结果核验。 +sidebarTitle: 第一次安装 status: stable --- -# 第一次同步 +# 第一次安装流程 ## 推荐顺序 1. 先运行 `tnmsc help`,确认你看到的是当前命令集。 2. 然后运行 `tnmsc dry-run`,查看哪些文件将会被写入。 -3. 只有在确认范围之后,才运行默认 sync 流水线。 +3. 
只有在确认范围之后,才运行默认 install 流水线。 ## 最短流程 ```sh tnmsc help tnmsc dry-run -tnmsc +tnmsc install ``` ## 为什么不要跳过 `dry-run` @@ -27,7 +27,7 @@ tnmsc 如果你不确定清理风险,先看 [dry-run 与 clean](/docs/cli/dry-run-and-clean) 和 [清理保护](/docs/cli/cleanup-protection)。 -## 同步后要核对什么 +## Install 后要核对什么 - 目标工具是否出现在[支持的输出](/docs/cli/supported-outputs)中 - 实际写入范围是否符合[输出范围](/docs/cli/output-scopes) diff --git a/doc/content/cli/index.mdx b/doc/content/cli/index.mdx index 6c2a71c0..944dfc6a 100644 --- a/doc/content/cli/index.mdx +++ b/doc/content/cli/index.mdx @@ -1,20 +1,20 @@ --- title: CLI -description: 围绕 tnmsc 命令面组织安装、项目准备、同步流程、配置字段与故障排查内容。 +description: 围绕 tnmsc 命令面组织安装、项目准备、install 流程、配置字段与故障排查内容。 sidebarTitle: 概览 status: stable --- # CLI -这一部分围绕公开的 `tnmsc` 命令面展开。像“怎么安装”“怎么准备项目”“怎么运行 sync”以及“某个配置字段到底是什么意思”这类问题,都应该先从这里开始。 +这一部分围绕公开的 `tnmsc` 命令面展开。像“怎么安装”“怎么准备项目”“怎么运行 `tnmsc install`”以及“某个配置字段到底是什么意思”这类问题,都应该先从这里开始。 ## 本节包含什么 - [安装与要求](/docs/cli/install):确认 Node、pnpm、Rust,以及更高版本 GUI 开发引擎的边界。 - [aindex 与 `.tnmsc.json`](/docs/quick-guide/aindex-and-config):在一个页面里准备源目录、配置文件和路径映射。 - [工作区与 aindex](/docs/cli/workspace-setup):理解工作区侧剩余职责与 `plugin.config.ts` 的分工。 -- [第一次同步](/docs/cli/first-sync):按推荐顺序运行 `help`、`dry-run` 和真实写入流程。 +- [第一次安装流程](/docs/cli/first-sync):按推荐顺序运行 `help`、`dry-run` 和 `tnmsc install`。 - [CLI 命令](/docs/cli/cli-commands):查看 `tnmsc --help` 当前暴露的命令面。 - [dry-run 与 clean](/docs/cli/dry-run-and-clean):先预览,再写入,最后再清理。 - [plugin.config.ts](/docs/cli/plugin-config) 与 [JSON Schema](/docs/cli/schema):核对运行时装配方式和当前 `.tnmsc.json` 字段面。 @@ -26,5 +26,5 @@ status: stable 1. 先看[安装与要求](/docs/cli/install)。 2. 接着看 [aindex 与 `.tnmsc.json`](/docs/quick-guide/aindex-and-config)。 -3. 然后按[第一次同步](/docs/cli/first-sync)完成一次真实运行。 +3. 然后按[第一次安装流程](/docs/cli/first-sync)完成一次真实运行。 4. 
需要核对事实时,再回来看 [CLI 命令](/docs/cli/cli-commands) 和 [JSON Schema](/docs/cli/schema)。 diff --git a/doc/content/cli/install.mdx b/doc/content/cli/install.mdx index 9c3b5254..8b13dfc0 100644 --- a/doc/content/cli/install.mdx +++ b/doc/content/cli/install.mdx @@ -46,12 +46,13 @@ pnpm -C cli exec node dist/index.mjs --help CLI help 里当前可见的核心命令有: -- 默认 sync 流水线 +- 默认 install 流水线 +- `install` - `help` - `version` - `dry-run` - `clean` -- `config key=value` +- `plugins` ## 首次检查 @@ -61,4 +62,4 @@ CLI help 里当前可见的核心命令有: tnmsc help ``` -你应该能看到 `dry-run`、`clean` 和 `config`。如果实际看到的不是这些,就先停在这里,不要继续照着后面的文档做。 +你应该能看到 `install`、`dry-run`、`clean` 和 `plugins`。如果实际看到的不是这些,就先停在这里,不要继续照着后面的文档做。 diff --git a/doc/content/cli/workspace-setup.mdx b/doc/content/cli/workspace-setup.mdx index e7d40070..0b4be35a 100644 --- a/doc/content/cli/workspace-setup.mdx +++ b/doc/content/cli/workspace-setup.mdx @@ -40,4 +40,4 @@ status: stable ## 下一步 -目录准备好之后,继续看[第一次同步](/docs/cli/first-sync)。在真实写入前,先用 `dry-run` 校验输出范围。 +目录准备好之后,继续看[第一次安装流程](/docs/cli/first-sync)。在真实写入前,先用 `dry-run` 校验输出范围。 diff --git a/doc/content/gui/index.mdx b/doc/content/gui/index.mdx index fa4c2dd5..05b39168 100644 --- a/doc/content/gui/index.mdx +++ b/doc/content/gui/index.mdx @@ -14,18 +14,18 @@ status: stable > > 等核心功能更完整之后,我预计会回来继续维护 GUI。 -`gui/` 是基于 Tauri 和 React 构建的桌面调用层。它的职责不是变成系统架构的中心,而是把 `sdk/` 中 `tnmsc` crate 暴露出来的配置编辑、执行、展示和日志检查能力组织成桌面工作流。 +`gui/` 是基于 Tauri 和 React 构建的桌面调用层。它的职责不是变成系统架构的中心,而是把 `sdk/` 中 `tnmsc` crate 暴露出来的配置编辑、install、展示和日志检查能力组织成桌面工作流。 ## 这一层负责什么 -- 触发 sync、`dry-run` 和 cleanup +- 触发 `install`、`dry-run` 和 cleanup - 编辑或展示 config - 浏览文件、plugin 结果和日志 - 提供基于页面的桌面工作流 ## 这一层不负责什么 -- 它不会重新实现 sync core +- 它不会重新实现 install core - 它不会在前端重新推导一套 CLI 规则 - 它不会改变长期坚持的 Rust-first / NAPI-first 方向 diff --git a/doc/content/gui/workflows-and-pages.mdx b/doc/content/gui/workflows-and-pages.mdx index cd61d19e..4a21443d 100644 --- a/doc/content/gui/workflows-and-pages.mdx +++ b/doc/content/gui/workflows-and-pages.mdx @@ -22,14 +22,14 
@@ status: stable ## 这些页面分别表示什么 - Dashboard:统计信息、快捷操作,以及受支持工具概览 -- Pipeline:运行 sync 和 `dry-run`,然后检查 plugin 结果与错误 +- Pipeline:运行 `install` 和 `dry-run`,然后检查 plugin 结果与错误 - Config:查看或编辑配置 - Plugins / Files / Logs:plugin 结果、文件视图和日志检查 - Settings:桌面端偏好设置与配置项 ## 与 CLI 的关系 -桌面页面并不会定义另一套独立的 sync 规则。它们主要是通过 bridge layer 调用更底层的能力,并把命令式流程转换成页面式工作流。 +桌面页面并不会定义另一套独立的 install 规则。它们主要是通过 bridge layer 调用更底层的能力,并把命令式流程转换成页面式工作流。 所以当你遇到下面这些问题时,仍然应该优先回到 CLI 文档: diff --git a/doc/content/index.mdx b/doc/content/index.mdx index af0dcf70..be7688ff 100644 --- a/doc/content/index.mdx +++ b/doc/content/index.mdx @@ -18,7 +18,7 @@ keywords: | 部分 | 核心问题 | 入口 | | --- | --- | --- | | 快速指南 | 我应该从 CLI、GUI 还是 MCP 开始,每条路径最短的起步方式是什么? | [快速指南](/docs/quick-guide) | -| CLI | 我该如何安装、准备项目、运行同步,并理解命令和配置字段? | [CLI](/docs/cli) | +| CLI | 我该如何安装、准备项目、运行 `tnmsc install`,并理解命令和配置字段? | [CLI](/docs/cli) | | SDK | 为什么 `sdk/` 是混合核心,它负责什么,内部使用方应该如何依赖它? | [SDK](/docs/sdk) | | MCP | `memory-sync-mcp` 是什么,它暴露了哪些工具,应该如何集成? | [MCP](/docs/mcp) | | GUI | 桌面层负责什么、有哪些页面,以及它如何与 `sdk/`、`tnmsc` crate 和 CLI 协作? 
| [GUI](/docs/gui) | @@ -28,7 +28,7 @@ keywords: ## 从哪里开始 - 如果你是第一次打开这个文档站,先看 [快速指南](/docs/quick-guide),判断自己需要走 CLI、GUI 还是 MCP 路径。 -- 如果你想立刻把 `memory-sync` 用起来,继续进入 [CLI](/docs/cli),先把安装、项目准备和第一次同步跑通。 +- 如果你想立刻把 `memory-sync` 用起来,继续进入 [CLI](/docs/cli),先把安装、项目准备和第一次安装流程跑通。 - 如果你需要理解内部核心是如何拆分的,以及为什么 `sdk/` 现在成了共享中心,就看 [SDK](/docs/sdk)。 - 如果你想把 `memory-sync-mcp` 集成到支持 MCP 的宿主里,直接跳到 [MCP](/docs/mcp)。 - 如果你更关心桌面应用而不是终端界面,就打开 [GUI](/docs/gui)。 diff --git a/doc/content/mcp/index.mdx b/doc/content/mcp/index.mdx index 8b4bc259..902de6b8 100644 --- a/doc/content/mcp/index.mdx +++ b/doc/content/mcp/index.mdx @@ -12,7 +12,7 @@ status: stable ## 它负责什么 - 它把 `memory-sync` 的 prompt 管理能力以 MCP stdio server 的形式暴露出来 -- 它复用 `@truenine/memory-sync-sdk` 导出的 prompt service,而不是重新实现一套独立逻辑 +- 它通过 `@truenine/memory-sync-sdk` 的最小 binding 访问 prompt 管理能力,而不是重新实现一套独立逻辑 - 它让支持 MCP 的宿主可以通过 tool call 读取、更新并回写 prompt 资产 ## 它不负责什么 diff --git a/doc/content/quick-guide/aindex-and-config.mdx b/doc/content/quick-guide/aindex-and-config.mdx index 913f814c..f52737f0 100644 --- a/doc/content/quick-guide/aindex-and-config.mdx +++ b/doc/content/quick-guide/aindex-and-config.mdx @@ -281,6 +281,6 @@ aindex 内容树位于: ## 接下来读什么 -- 如果你想了解命令工作流,继续看[第一次同步](/docs/cli/first-sync) +- 如果你想了解命令工作流,继续看[第一次安装流程](/docs/cli/first-sync) - 如果你需要插件装配说明,阅读 [plugin.config.ts](/docs/cli/plugin-config) - 如果你需要清理或输出边界行为,继续看 [CLI](/docs/cli) diff --git a/doc/content/quick-guide/index.mdx b/doc/content/quick-guide/index.mdx index e37937cf..f9a75b8a 100644 --- a/doc/content/quick-guide/index.mdx +++ b/doc/content/quick-guide/index.mdx @@ -33,8 +33,8 @@ flowchart LR; | 你的目标 | 去哪里 | 原因 | | --- | --- | --- | -| 在终端里同步 prompts、rules、skills、commands 或 project memory | [CLI](/docs/cli) | 真实的命令界面、schema、输出范围和清理边界都在那里核实。 | -| 在桌面应用里编辑配置、触发执行并查看日志 | [GUI](/docs/gui) | `gui/` 负责桌面工作流,但执行仍依赖 `sdk/` 中的 `tnmsc` crate。 | +| 在终端里运行 `tnmsc install` 来分发 prompts、rules、skills、commands 或 project memory | [CLI](/docs/cli) | 真实的命令界面、schema、输出范围和清理边界都在那里核实。 | +| 
在桌面应用里编辑配置、触发 install 并查看日志 | [GUI](/docs/gui) | `gui/` 负责桌面工作流,但 install 仍依赖 `sdk/` 中的 `tnmsc` crate。 | | 把 `memory-sync-mcp` 连接到支持 MCP 的宿主 | [MCP](/docs/mcp) | 这一部分重点说明 stdio 服务端、工具列表和 `workspaceDir` 语义。 | | 在使用任何东西之前先理解仓库架构 | [SDK](/docs/sdk) 和 [技术细节](/docs/technical-details) | 前者解释混合核心边界,后者解释事实来源模型和同步流水线。 | @@ -52,7 +52,7 @@ flowchart LR; 1. 阅读 [安装与要求](/docs/cli/install)。 2. 接着看 [aindex 与 `.tnmsc.json`](/docs/quick-guide/aindex-and-config)。 -3. 然后使用 [第一次同步](/docs/cli/first-sync) 实际跑通一次完整流程。 +3. 然后使用 [第一次安装流程](/docs/cli/first-sync) 实际跑通一次完整流程。 ### 如果你从 GUI 开始 diff --git a/doc/content/quick-guide/quick-install.mdx b/doc/content/quick-guide/quick-install.mdx index fc1b8333..c58cac80 100644 --- a/doc/content/quick-guide/quick-install.mdx +++ b/doc/content/quick-guide/quick-install.mdx @@ -28,7 +28,7 @@ keywords: tnmsc help ``` -你应该能看到 `dry-run`、`clean` 和 `config`。 +你应该能看到 `install`、`dry-run`、`clean` 和 `plugins`。 ## 本地 monorepo 开发 @@ -44,6 +44,6 @@ pnpm -C cli exec node dist/index.mjs --help ## 接下来读什么 -- 如果你想把真实流程完整跑一遍,继续看 [第一次同步](/docs/cli/first-sync)。 +- 如果你想把真实流程完整跑一遍,继续看 [第一次安装流程](/docs/cli/first-sync)。 - 如果你还需要 Node、pnpm 和 Rust 的版本要求,继续看 [安装与要求](/docs/cli/install)。 - 如果你还没决定走 CLI、GUI 还是 MCP,回到 [快速指南](/docs/quick-guide)。 diff --git a/doc/content/sdk/index.mdx b/doc/content/sdk/index.mdx index ccf29156..b3272ed8 100644 --- a/doc/content/sdk/index.mdx +++ b/doc/content/sdk/index.mdx @@ -13,8 +13,8 @@ status: stable - 它包含私有 npm 包 `@truenine/memory-sync-sdk` - 它包含 Rust crate `tnmsc` 的实际 workspace 路径 -- 它负责 TypeScript 同步流水线、prompt service、schema 生成和 `plugin-runtime` -- 它负责 Rust library、NAPI 构建、Node bridge runtime,以及嵌入式 runtime 逻辑 +- 它负责 Rust crate `tnmsc` 的 facade、NAPI 构建、prompt service、schema 生成,以及最小化的 TypeScript loader 入口 +- 它仍然承接少量过渡期 bridge 逻辑,但这些 bridge 路径已经退回到 `sdk/` 内部实现,不再是 `cli/` 的公共组合中心 - 它是 `mcp/`、`gui/` 以及未来内部消费者的默认依赖入口 ## 这一层不负责什么 @@ -39,7 +39,7 @@ status: stable | 消费者 | 依赖方式 | | --- | --- | | `cli/` | 一个对 `sdk/` 导出能力做轻量封装的 shell 和兼容层 | -| `mcp/` | 直接从 
`@truenine/memory-sync-sdk` 导入 prompt service | +| `mcp/` | 通过 `@truenine/memory-sync-sdk` 的最小 binding 访问 prompt 管理能力 | | `gui/src-tauri` | 继续依赖 crate `tnmsc`,其实际路径现在位于 `sdk/` 下 | ## 边界规则 diff --git a/doc/content/technical-details/documentation-components.mdx b/doc/content/technical-details/documentation-components.mdx index 1ae3fb6f..47d9ee1c 100644 --- a/doc/content/technical-details/documentation-components.mdx +++ b/doc/content/technical-details/documentation-components.mdx @@ -17,9 +17,9 @@ status: stable items={[ { tool: "CLI", - summary: "负责 sync 与 clean 执行的用户入口。", + summary: "负责 install 与 clean 执行的用户入口。", status: "stable", - capabilities: ["运行默认 sync 流水线", "支持 dry-run / clean / config", "暴露 npm 与 crate 入口"], + capabilities: ["运行默认 install 流水线", "支持 install / dry-run / clean / plugins", "暴露 npm 与 crate 入口"], surfaces: ["tnmsc", "pnpm -C cli exec node dist/index.mjs --help"], notes: "对用户来说,真正的执行入口仍然是 CLI,而不是文档站或 GUI。", }, diff --git a/doc/lib/site.ts b/doc/lib/site.ts index c3b3878a..2a07ba96 100644 --- a/doc/lib/site.ts +++ b/doc/lib/site.ts @@ -68,7 +68,7 @@ export const homeEntryCards = [ { href: '/docs/cli', title: 'CLI', - detail: 'Organized around installation, project setup, the first sync run, configuration fields, and the exposed command surface.' + detail: 'Organized around installation, project setup, the first install run, configuration fields, and the exposed command surface.' }, { href: '/docs/sdk', @@ -88,7 +88,7 @@ export const homeEntryCards = [ { href: '/docs/technical-details', title: 'Technical Details', - detail: 'Concentrates the architecture boundaries, the sync pipeline, the source-of-truth model, and authoring conventions.' + detail: 'Concentrates the architecture boundaries, the install pipeline, the source-of-truth model, and authoring conventions.' 
}, { href: '/docs/design-rationale', @@ -126,7 +126,7 @@ export const readingPath = [ step: '01', href: '/docs/quick-guide', title: 'Choose Your Entry Point', - description: 'Decide whether you are starting from terminal sync, the desktop workflow, or MCP integration.' + description: 'Decide whether you are starting from terminal install, the desktop workflow, or MCP integration.' }, { step: '02', @@ -149,7 +149,7 @@ export const readingPath = [ { step: '05', href: '/docs/cli/cli-commands', - title: 'Run dry-run and Sync', + title: 'Run dry-run and Install', description: 'Validate the output list, scope, and cleanup behavior before writing into target tools.' } ] as const diff --git a/doc/package.json b/doc/package.json index d74c18e5..fce7cf8c 100644 --- a/doc/package.json +++ b/doc/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-docs", - "version": "2026.10408.12323", + "version": "2026.10411.10132", "private": true, "description": "Chinese-first manifesto-led documentation site for @truenine/memory-sync.", "engines": { diff --git a/gui/package.json b/gui/package.json index a1e7ffc1..e39ea294 100644 --- a/gui/package.json +++ b/gui/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-gui", - "version": "2026.10408.12323", + "version": "2026.10411.10132", "private": true, "engines": { "node": ">= 22" @@ -16,6 +16,8 @@ "tauri:build": "tauri build", "generate:routes": "tsx scripts/generate-routes.ts", "check:type": "tsc --noEmit", + "lint": "cargo fmt --check --manifest-path src-tauri/Cargo.toml", + "lint:fix": "cargo fmt --manifest-path src-tauri/Cargo.toml", "test:ui": "vitest --run", "test:tauri": "cargo test --manifest-path src-tauri/Cargo.toml --lib --bins --tests", "test": "pnpm run test:ui && pnpm tsx ./scripts/run-tauri-tests.ts" diff --git a/gui/scripts/generate-icons.ts b/gui/scripts/generate-icons.ts index c408eb76..2e488508 100644 --- a/gui/scripts/generate-icons.ts +++ b/gui/scripts/generate-icons.ts @@ -1,6 +1,9 @@ import { 
execSync } from 'node:child_process' import { dirname, join } from 'node:path' import { fileURLToPath } from 'node:url' +import markdownOutput from '../../scripts/markdown-output' + +const {writeError, writeMarkdownBlock} = markdownOutput const __dirname = dirname(fileURLToPath(import.meta.url)) const rootDir = join(__dirname, '..') @@ -17,9 +20,14 @@ async function main() { encoding: 'utf-8', } ) - console.log('✓ Icons generated successfully') + writeMarkdownBlock('Icon generation complete', { + source: sourceIcon, + output: iconsDir, + }) } catch (error) { - console.error('✗ Failed to generate icons') + writeError('Icon generation failed', { + error: error instanceof Error ? error.message : String(error), + }) process.exit(1) } } diff --git a/gui/scripts/generate-routes.ts b/gui/scripts/generate-routes.ts index 1a30e010..c882d80a 100644 --- a/gui/scripts/generate-routes.ts +++ b/gui/scripts/generate-routes.ts @@ -1,6 +1,9 @@ #!/usr/bin/env tsx import { Generator, getConfig } from '@tanstack/router-generator' import { resolve } from 'node:path' +import markdownOutput from '../../scripts/markdown-output' + +const {writeMarkdownBlock} = markdownOutput const root = resolve(import.meta.dirname, '..') @@ -13,4 +16,6 @@ const config = await getConfig({ const gen = new Generator({ config, root }) await gen.run() -console.log('[generate-routes] routeTree.gen.ts updated') +writeMarkdownBlock('Route tree updated', { + output: resolve(root, 'src/routeTree.gen.ts'), +}) diff --git a/gui/scripts/run-tauri-tests.ts b/gui/scripts/run-tauri-tests.ts index 3647ab2b..144091a3 100644 --- a/gui/scripts/run-tauri-tests.ts +++ b/gui/scripts/run-tauri-tests.ts @@ -1,4 +1,7 @@ import {spawnSync} from 'node:child_process' +import markdownOutput from '../../scripts/markdown-output' + +const {writeError, writeWarning} = markdownOutput function cargoAvailable(): boolean { const result = spawnSync('cargo', ['--version'], { @@ -12,8 +15,9 @@ if (!cargoAvailable()) { // Skip Tauri tests when 
Rust toolchain is not installed locally so that // JS/Vitest tests can still pass. CI or dev machines with cargo installed // will still run the full `test:tauri` suite. - // eslint-disable-next-line no-console - console.warn('[memory-sync-gui] cargo not found on PATH, skipping Tauri tests (test:tauri).') + writeWarning('Skipping Tauri tests', { + reason: 'cargo is not available on PATH.', + }) process.exit(0) } @@ -23,10 +27,10 @@ const child = spawnSync('pnpm', ['run', 'test:tauri'], { }) if (child.error != null) { - // eslint-disable-next-line no-console - console.error('[memory-sync-gui] Failed to run pnpm test:tauri:', child.error) + writeError('Failed to run `pnpm test:tauri`', { + error: child.error.message, + }) process.exit(1) } process.exit(child.status ?? 1) - diff --git a/gui/src-tauri/Cargo.toml b/gui/src-tauri/Cargo.toml index b04c588c..003315de 100644 --- a/gui/src-tauri/Cargo.toml +++ b/gui/src-tauri/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "memory-sync-gui" -version = "2026.10408.12323" +version = "2026.10411.10132" description = "Memory Sync desktop GUI application" authors.workspace = true edition.workspace = true diff --git a/gui/src-tauri/build.rs b/gui/src-tauri/build.rs index d860e1e6..795b9b7c 100644 --- a/gui/src-tauri/build.rs +++ b/gui/src-tauri/build.rs @@ -1,3 +1,3 @@ fn main() { - tauri_build::build() + tauri_build::build() } diff --git a/gui/src-tauri/src/commands.rs b/gui/src-tauri/src/commands.rs index 991e01ae..be281f53 100644 --- a/gui/src-tauri/src/commands.rs +++ b/gui/src-tauri/src/commands.rs @@ -1,28 +1,26 @@ -/// Tauri commands that bridge the frontend to the `tnmsc` CLI. +/// Tauri commands that bridge the frontend to the `tnmsc` crate facade. /// -/// Commands use the `tnmsc` crate's library API for direct in-process invocation. 
-/// Bridge commands (execute, dry-run, clean, plugins) still spawn a Node.js subprocess -/// internally via `tnmsc::run_bridge_command`, but the GUI no longer searches for or -/// invokes the CLI binary as a sidecar. +/// Core install / clean / config / plugin operations run through direct crate APIs. +/// The log viewer still uses the legacy bridge path until command streaming moves into Rust. use std::path::{Path, PathBuf}; use std::process::Command as StdCommand; use serde::{Deserialize, Serialize}; -use serde_json::{Map, Value}; +use serde_json::Value; use tnmsc::core::config as core_config; const PRIMARY_SOURCE_MDX_EXTENSION: &str = ".src.mdx"; const SOURCE_MDX_FILE_TYPE: &str = "sourceMdx"; const PROJECT_SERIES_CATEGORIES: [&str; 3] = ["app", "ext", "arch"]; -const INTERNAL_BRIDGE_JSON_FLAG: &str = "--bridge-json"; fn has_source_mdx_extension(name: &str) -> bool { - name.ends_with(PRIMARY_SOURCE_MDX_EXTENSION) + name.ends_with(PRIMARY_SOURCE_MDX_EXTENSION) } fn replace_source_mdx_extension(path: &str) -> Option { - path.strip_suffix(PRIMARY_SOURCE_MDX_EXTENSION) - .map(|without_extension| format!("{without_extension}.mdx")) + path + .strip_suffix(PRIMARY_SOURCE_MDX_EXTENSION) + .map(|without_extension| format!("{without_extension}.mdx")) } // --------------------------------------------------------------------------- @@ -33,319 +31,223 @@ fn replace_source_mdx_extension(path: &str) -> Option { #[derive(Debug, Clone, Serialize, Deserialize, Default)] #[serde(rename_all = "camelCase")] pub struct PipelineResult { - pub success: bool, - #[serde(default)] - pub total_files: i32, - #[serde(default)] - pub total_dirs: i32, - #[serde(default)] - pub dry_run: bool, - #[serde(skip_serializing_if = "Option::is_none")] - pub command: Option, - #[serde(default)] - pub plugin_results: Vec, - #[serde(default)] - pub logs: Vec, - #[serde(default)] - pub errors: Vec, + pub success: bool, + #[serde(default)] + pub total_files: i32, + #[serde(default)] + pub total_dirs: i32, 
+ #[serde(default)] + pub dry_run: bool, + #[serde(skip_serializing_if = "Option::is_none")] + pub command: Option, + #[serde(default)] + pub plugin_results: Vec, + #[serde(default)] + pub logs: Vec, + #[serde(default)] + pub errors: Vec, } /// Per-plugin execution result extracted from log lines. #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct PluginExecutionResult { - pub plugin: String, - #[serde(default)] - pub files: i32, - #[serde(default)] - pub dirs: i32, - #[serde(default)] - pub dry_run: bool, + pub plugin: String, + #[serde(default)] + pub files: i32, + #[serde(default)] + pub dirs: i32, + #[serde(default)] + pub dry_run: bool, } /// A single parsed log entry from the CLI. #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct LogEntry { - pub timestamp: String, - pub level: String, - pub logger: String, - pub payload: serde_json::Value, -} - -#[derive(Debug, Clone, Deserialize)] -#[serde(rename_all = "camelCase")] -struct BridgeJsonCommandResult { - success: bool, - #[serde(default)] - files_affected: i32, - #[serde(default)] - dirs_affected: i32, - #[serde(default)] - message: Option, - #[serde(default)] - warnings: Vec, - #[serde(default)] - errors: Vec, -} - -#[derive(Debug, Clone, Deserialize)] -struct PluginListEntry { - name: String, + pub stream: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub source: Option, + pub markdown: String, } // --------------------------------------------------------------------------- // Tauri commands // --------------------------------------------------------------------------- -/// Execute the sync pipeline (default command) or dry-run. +/// Execute the install pipeline (default command) or dry-run. 
#[tauri::command] -pub fn execute_pipeline(cwd: String, dry_run: bool) -> Result { - let subcommand = if dry_run { "dry-run" } else { "execute" }; - let result = tnmsc::run_bridge_command(subcommand, Path::new(&cwd), &[INTERNAL_BRIDGE_JSON_FLAG]) - .map_err(|e| e.to_string())?; - parse_pipeline_result(&result.stdout, subcommand, dry_run) +pub fn install_pipeline(cwd: String, dry_run: bool) -> Result { + let command_name = if dry_run { "dry-run" } else { "install" }; + let options = tnmsc::MemorySyncCommandOptions { + cwd: Some(cwd), + ..Default::default() + }; + let result = if dry_run { + tnmsc::dry_run(options) + } else { + tnmsc::install(options) + } + .map_err(|error| error.to_string())?; + + Ok(to_pipeline_result(&result, command_name, dry_run)) } /// Load the merged configuration via the tnmsc library API. #[tauri::command] pub fn load_config(cwd: String) -> Result { - let result = tnmsc::load_config(Path::new(&cwd)).map_err(|e| e.to_string())?; - serde_json::to_value(&result.config).map_err(|e| e.to_string()) + let result = tnmsc::load_config(Path::new(&cwd)).map_err(|e| e.to_string())?; + serde_json::to_value(&result).map_err(|e| e.to_string()) } -/// List all registered plugins via the tnmsc bridge command. +/// List all registered plugins from the crate-owned registry. 
#[tauri::command] -pub fn list_plugins(cwd: String) -> Result, String> { - let result = tnmsc::run_bridge_command("plugins", Path::new(&cwd), &[INTERNAL_BRIDGE_JSON_FLAG]) - .map_err(|e| e.to_string())?; - let plugins = serde_json::from_str::>(&result.stdout) - .map_err(|e| format!("Failed to parse plugins output: {e}"))?; - Ok(plugins - .into_iter() - .map(|plugin| PluginExecutionResult { - plugin: plugin.name, - files: 0, - dirs: 0, - dry_run: false, - }) - .collect()) +pub fn list_plugins(_cwd: String) -> Result, String> { + Ok( + tnmsc::list_plugins() + .into_iter() + .map(|plugin| PluginExecutionResult { + plugin: plugin.name, + files: 0, + dirs: 0, + dry_run: false, + }) + .collect(), + ) } /// Clean previously generated output files. #[tauri::command] pub fn clean_outputs(cwd: String, dry_run: bool) -> Result { - let subcommand = if dry_run { "dry-run-clean" } else { "clean" }; - let result = tnmsc::run_bridge_command(subcommand, Path::new(&cwd), &[INTERNAL_BRIDGE_JSON_FLAG]) - .map_err(|e| e.to_string())?; - parse_pipeline_result(&result.stdout, subcommand, dry_run) + let command_name = if dry_run { "dry-run-clean" } else { "clean" }; + let result = tnmsc::clean(tnmsc::MemorySyncCommandOptions { + cwd: Some(cwd), + dry_run: Some(dry_run), + ..Default::default() + }) + .map_err(|error| error.to_string())?; + + Ok(to_pipeline_result(&result, command_name, dry_run)) } -/// Get log output from a CLI bridge command. +/// Get log output from a CLI command. /// -/// Runs the given command via `tnmsc::run_bridge_command` in non-JSON mode and -/// parses the stderr output as log entries. Falls back to parsing stdout if -/// stderr yields no entries. +/// Previously used the legacy Node bridge; now returns an empty log stream +/// until command logging is fully implemented in Rust. 
#[tauri::command] -pub fn get_logs(cwd: String, command: String) -> Result, String> { - let args: Vec<&str> = command.split_whitespace().collect(); - let subcommand = args.first().copied().unwrap_or("execute"); - let extra_args: Vec<&str> = args.iter().skip(1).copied().collect(); - let result = tnmsc::run_bridge_command(subcommand, Path::new(&cwd), &extra_args) - .map_err(|e| e.to_string())?; - let logs = parse_log_lines(&result.stderr); - if logs.is_empty() { - Ok(parse_log_lines(&result.stdout)) - } else { - Ok(logs) - } +pub fn get_logs(_cwd: String, _command: String) -> Result, String> { + Ok(Vec::new()) } -fn parse_pipeline_result(raw: &str, command: &str, dry_run: bool) -> Result { - let parsed = serde_json::from_str::(raw) - .map_err(|e| format!("Failed to parse bridge result: {e}"))?; - - Ok(PipelineResult { - success: parsed.success, - total_files: parsed.files_affected, - total_dirs: parsed.dirs_affected, - dry_run, - command: Some(command.to_string()), - plugin_results: Vec::new(), - logs: Vec::new(), - errors: collect_bridge_messages(&parsed), - }) +fn to_pipeline_result( + result: &tnmsc::MemorySyncCommandResult, + command: &str, + dry_run: bool, +) -> PipelineResult { + PipelineResult { + success: result.success, + total_files: result.files_affected, + total_dirs: result.dirs_affected, + dry_run, + command: Some(command.to_string()), + plugin_results: Vec::new(), + logs: Vec::new(), + errors: collect_bridge_messages(result), + } } -fn collect_bridge_messages(result: &BridgeJsonCommandResult) -> Vec { - let mut messages = Vec::new(); +fn collect_bridge_messages(result: &tnmsc::MemorySyncCommandResult) -> Vec { + let mut messages = Vec::new(); - if let Some(message) = result.message.as_ref() - && !message.is_empty() - { - messages.push(message.clone()); - } + if let Some(message) = result.message.as_ref() + && !message.is_empty() + { + messages.push(message.clone()); + } - for diagnostic in &result.errors { - if let Some(message) = 
extract_diagnostic_message(diagnostic) { - messages.push(message); - } + for diagnostic in &result.errors { + if let Some(message) = extract_diagnostic_message(diagnostic) { + messages.push(message); } + } - for diagnostic in &result.warnings { - if let Some(message) = extract_diagnostic_message(diagnostic) { - messages.push(message); - } + for diagnostic in &result.warnings { + if let Some(message) = extract_diagnostic_message(diagnostic) { + messages.push(message); } + } - messages + messages } fn extract_diagnostic_message(diagnostic: &Value) -> Option { - let object = diagnostic.as_object()?; - if let Some(copy_text) = object.get("copyText").and_then(Value::as_array) { - let lines = copy_text - .iter() - .filter_map(Value::as_str) - .map(ToOwned::to_owned) - .collect::>(); - if !lines.is_empty() { - return Some(lines.join("\n")); - } - } - - let title = object.get("title").and_then(Value::as_str)?; - let code = object.get("code").and_then(Value::as_str).unwrap_or("DIAGNOSTIC"); - Some(format!("[{code}] {title}")) -} - -/// Parse markdown-style log output into lightweight GUI log entries. 
-fn parse_log_lines(raw: &str) -> Vec { - let mut entries = Vec::new(); - let mut current: Option = None; - - for raw_line in raw.lines() { - let line = raw_line.trim_end(); - if let Some((level, logger, message)) = parse_log_header(line) { - if let Some(entry) = current.take() { - entries.push(entry); - } - - let mut payload = Map::new(); - if let Some(message) = message { - payload.insert("message".to_string(), Value::String(message)); - } - - current = Some(LogEntry { - timestamp: String::new(), - level, - logger, - payload: Value::Object(payload), - }); - continue; - } - - if let Some(entry) = current.as_mut() { - append_log_body_line(&mut entry.payload, line); - } - } - - if let Some(entry) = current.take() { - entries.push(entry); - } - - entries -} - -fn parse_log_header(line: &str) -> Option<(String, String, Option)> { - if !line.starts_with("**") { - return None; - } - - let remainder = line.strip_prefix("**")?; - let level_end = remainder.find("**")?; - let level = remainder[..level_end].trim().to_string(); - let after_level = remainder[level_end + 2..].trim_start(); - let logger_start = after_level.find('`')?; - let after_logger_start = &after_level[logger_start + 1..]; - let logger_end = after_logger_start.find('`')?; - let logger = after_logger_start[..logger_end].to_string(); - let message = after_logger_start[logger_end + 1..].trim(); - - Some(( - level, - logger, - if message.is_empty() { - None - } else { - Some(message.to_string()) - }, - )) -} - -fn append_log_body_line(payload: &mut Value, line: &str) { - let object = match payload { - Value::Object(object) => object, - _ => return, - }; - - let entry = object - .entry("body".to_string()) - .or_insert_with(|| Value::Array(Vec::new())); - if let Value::Array(lines) = entry - && !line.trim().is_empty() - { - lines.push(Value::String(line.trim().to_string())); + let object = diagnostic.as_object()?; + if let Some(copy_text) = object.get("copyText").and_then(Value::as_array) { + let lines = 
copy_text + .iter() + .filter_map(Value::as_str) + .map(ToOwned::to_owned) + .collect::>(); + if !lines.is_empty() { + return Some(lines.join("\n")); } + } + + let title = object.get("title").and_then(Value::as_str)?; + let code = object + .get("code") + .and_then(Value::as_str) + .unwrap_or("DIAGNOSTIC"); + Some(format!("[{code}] {title}")) } /// Resolve the canonical global config file path. fn resolve_global_config_path() -> Result { - let home = dirs::home_dir().ok_or("Cannot determine home directory")?; - Ok(home.join(".aindex").join(".tnmsc.json")) + let home = dirs::home_dir().ok_or("Cannot determine home directory")?; + Ok(home.join(".aindex").join(".tnmsc.json")) } /// Read a config file's raw content. Returns empty string if file doesn't exist. #[tauri::command] pub fn read_config_file() -> Result { - let path = resolve_global_config_path()?; - if !path.exists() { - return Ok(String::new()); - } - std::fs::read_to_string(&path).map_err(|e| format!("Failed to read {}: {e}", path.display())) + let path = resolve_global_config_path()?; + if !path.exists() { + return Ok(String::new()); + } + std::fs::read_to_string(&path).map_err(|e| format!("Failed to read {}: {e}", path.display())) } /// Write content to the canonical global config file. Creates parent directories if needed. 
#[tauri::command] pub fn write_config_file(content: String) -> Result<(), String> { - let path = resolve_global_config_path()?; - if let Some(parent) = path.parent() { - std::fs::create_dir_all(parent) - .map_err(|e| format!("Failed to create directory {}: {e}", parent.display()))?; - } - std::fs::write(&path, &content).map_err(|e| format!("Failed to write {}: {e}", path.display())) + let path = resolve_global_config_path()?; + if let Some(parent) = path.parent() { + std::fs::create_dir_all(parent) + .map_err(|e| format!("Failed to create directory {}: {e}", parent.display()))?; + } + std::fs::write(&path, &content).map_err(|e| format!("Failed to write {}: {e}", path.display())) } /// Open the global config directory in the system file manager. #[tauri::command] pub fn open_config_dir() -> Result { - let home = dirs::home_dir().ok_or("Cannot determine home directory")?; - let config_dir = home.join(".aindex"); - if !config_dir.exists() { - std::fs::create_dir_all(&config_dir) - .map_err(|e| format!("Failed to create directory {}: {e}", config_dir.display()))?; - } - let path_str = config_dir.to_string_lossy().to_string(); - - #[cfg(target_os = "linux")] - let result = StdCommand::new("xdg-open").arg(&config_dir).spawn(); - #[cfg(target_os = "macos")] - let result = StdCommand::new("open").arg(&config_dir).spawn(); - #[cfg(target_os = "windows")] - let result = StdCommand::new("explorer").arg(&config_dir).spawn(); - - result.map_err(|e| format!("Failed to open directory: {e}"))?; - Ok(path_str) + let home = dirs::home_dir().ok_or("Cannot determine home directory")?; + let config_dir = home.join(".aindex"); + if !config_dir.exists() { + std::fs::create_dir_all(&config_dir) + .map_err(|e| format!("Failed to create directory {}: {e}", config_dir.display()))?; + } + let path_str = config_dir.to_string_lossy().to_string(); + + #[cfg(target_os = "linux")] + let result = StdCommand::new("xdg-open").arg(&config_dir).spawn(); + #[cfg(target_os = "macos")] + let result = 
StdCommand::new("open").arg(&config_dir).spawn(); + #[cfg(target_os = "windows")] + let result = StdCommand::new("explorer").arg(&config_dir).spawn(); + + result.map_err(|e| format!("Failed to open directory: {e}"))?; + Ok(path_str) } // --------------------------------------------------------------------------- @@ -356,339 +258,337 @@ pub fn open_config_dir() -> Result { #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct AindexFileEntry { - /// Relative path from aindex root, e.g. "app/TrueNine/agt.src.mdx" - pub source_path: String, - /// Relative path of translated file (empty for resource files) - pub translated_path: String, - /// Whether the translated file exists on disk - pub translated_exists: bool, - /// "sourceMdx" for source+translated pairs, "resource" for other files - pub file_type: String, + /// Relative path from aindex root, e.g. "app/TrueNine/agt.src.mdx" + pub source_path: String, + /// Relative path of translated file (empty for resource files) + pub translated_path: String, + /// Whether the translated file exists on disk + pub translated_exists: bool, + /// "sourceMdx" for source+translated pairs, "resource" for other files + pub file_type: String, } /// Parsed global config with resolved paths. 
struct ResolvedConfig { - aindex_root: PathBuf, - config: tnmsc::core::config::UserConfigFile, + aindex_root: PathBuf, + config: tnmsc::core::config::UserConfigFile, } struct CategoryPaths { - source_rel: String, - translated_rel: String, + source_rel: String, + translated_rel: String, } fn resolve_category_paths( - config: &tnmsc::core::config::UserConfigFile, - category: &str, + config: &tnmsc::core::config::UserConfigFile, + category: &str, ) -> Result { - let aindex = &config.aindex; - - let resolve_pair = |pair: Option<&tnmsc::core::config::DirPair>, - default_source: &str, - default_translated: &str| - -> CategoryPaths { - CategoryPaths { - source_rel: pair - .and_then(|value| value.src.as_deref()) - .unwrap_or(default_source) - .to_string(), - translated_rel: pair - .and_then(|value| value.dist.as_deref()) - .unwrap_or(default_translated) - .to_string(), - } - }; - - match category { - "skills" => Ok(resolve_pair( - aindex.skills.as_ref(), - core_config::DEFAULT_SKILLS_SRC_DIR, - core_config::DEFAULT_SKILLS_DIST_DIR, - )), - "commands" => Ok(resolve_pair( - aindex.commands.as_ref(), - core_config::DEFAULT_COMMANDS_SRC_DIR, - core_config::DEFAULT_COMMANDS_DIST_DIR, - )), - "agents" => Ok(resolve_pair( - aindex.sub_agents.as_ref(), - core_config::DEFAULT_SUB_AGENTS_SRC_DIR, - core_config::DEFAULT_SUB_AGENTS_DIST_DIR, - )), - "rules" => Ok(resolve_pair( - aindex.rules.as_ref(), - core_config::DEFAULT_RULES_SRC_DIR, - core_config::DEFAULT_RULES_DIST_DIR, - )), - "app" => Ok(resolve_pair( - aindex.app.as_ref(), - core_config::DEFAULT_APP_SRC_DIR, - core_config::DEFAULT_APP_DIST_DIR, - )), - "ext" => Ok(resolve_pair( - aindex.ext.as_ref(), - core_config::DEFAULT_EXT_SRC_DIR, - core_config::DEFAULT_EXT_DIST_DIR, - )), - "arch" => Ok(resolve_pair( - aindex.arch.as_ref(), - core_config::DEFAULT_ARCH_SRC_DIR, - core_config::DEFAULT_ARCH_DIST_DIR, - )), - _ => Err(format!("Unknown category: {category}")), + let aindex = &config.aindex; + + let resolve_pair = |pair: 
Option<&tnmsc::core::config::DirPair>, + default_source: &str, + default_translated: &str| + -> CategoryPaths { + CategoryPaths { + source_rel: pair + .and_then(|value| value.src.as_deref()) + .unwrap_or(default_source) + .to_string(), + translated_rel: pair + .and_then(|value| value.dist.as_deref()) + .unwrap_or(default_translated) + .to_string(), } + }; + + match category { + "skills" => Ok(resolve_pair( + aindex.skills.as_ref(), + core_config::DEFAULT_SKILLS_SRC_DIR, + core_config::DEFAULT_SKILLS_DIST_DIR, + )), + "commands" => Ok(resolve_pair( + aindex.commands.as_ref(), + core_config::DEFAULT_COMMANDS_SRC_DIR, + core_config::DEFAULT_COMMANDS_DIST_DIR, + )), + "agents" => Ok(resolve_pair( + aindex.sub_agents.as_ref(), + core_config::DEFAULT_SUB_AGENTS_SRC_DIR, + core_config::DEFAULT_SUB_AGENTS_DIST_DIR, + )), + "rules" => Ok(resolve_pair( + aindex.rules.as_ref(), + core_config::DEFAULT_RULES_SRC_DIR, + core_config::DEFAULT_RULES_DIST_DIR, + )), + "app" => Ok(resolve_pair( + aindex.app.as_ref(), + core_config::DEFAULT_APP_SRC_DIR, + core_config::DEFAULT_APP_DIST_DIR, + )), + "ext" => Ok(resolve_pair( + aindex.ext.as_ref(), + core_config::DEFAULT_EXT_SRC_DIR, + core_config::DEFAULT_EXT_DIST_DIR, + )), + "arch" => Ok(resolve_pair( + aindex.arch.as_ref(), + core_config::DEFAULT_ARCH_SRC_DIR, + core_config::DEFAULT_ARCH_DIST_DIR, + )), + _ => Err(format!("Unknown category: {category}")), + } } fn collect_project_series_category_files( - src_dir: &std::path::Path, - base: &std::path::Path, - translated_root_rel: &str, - dist_dir: &std::path::Path, - out: &mut Vec, + src_dir: &std::path::Path, + base: &std::path::Path, + translated_root_rel: &str, + dist_dir: &std::path::Path, + out: &mut Vec, ) -> std::io::Result<()> { - if let Ok(top_entries) = std::fs::read_dir(src_dir) { - for top in top_entries.flatten() { - if top.path().is_dir() { - collect_category_source_mdx( - &top.path(), - src_dir, - base, - translated_root_rel, - dist_dir, - out, - )?; - } - } + if let 
Ok(top_entries) = std::fs::read_dir(src_dir) { + for top in top_entries.flatten() { + if top.path().is_dir() { + collect_category_source_mdx( + &top.path(), + src_dir, + base, + translated_root_rel, + dist_dir, + out, + )?; + } } + } - Ok(()) + Ok(()) } fn collect_root_memory_prompt_files( - base: &std::path::Path, - config: &tnmsc::core::config::UserConfigFile, - out: &mut Vec, + base: &std::path::Path, + config: &tnmsc::core::config::UserConfigFile, + out: &mut Vec, ) { - for (source_rel, translated_rel) in collect_root_memory_prompt_pairs(config) { - let source_abs = base.join(&source_rel); - if !(source_abs.exists() && source_abs.is_file()) { - continue; - } - - out.push(AindexFileEntry { - source_path: source_rel, - translated_path: translated_rel.clone(), - translated_exists: base.join(translated_rel).exists(), - file_type: SOURCE_MDX_FILE_TYPE.to_string(), - }); + for (source_rel, translated_rel) in collect_root_memory_prompt_pairs(config) { + let source_abs = base.join(&source_rel); + if !(source_abs.exists() && source_abs.is_file()) { + continue; } + + out.push(AindexFileEntry { + source_path: source_rel, + translated_path: translated_rel.clone(), + translated_exists: base.join(translated_rel).exists(), + file_type: SOURCE_MDX_FILE_TYPE.to_string(), + }); + } } fn collect_root_memory_prompt_pairs( - config: &tnmsc::core::config::UserConfigFile, + config: &tnmsc::core::config::UserConfigFile, ) -> Vec<(String, String)> { - let aindex = &config.aindex; - [ - ( - aindex.global_prompt.as_ref(), - core_config::DEFAULT_GLOBAL_PROMPT_SRC, - core_config::DEFAULT_GLOBAL_PROMPT_DIST, - ), - ( - aindex.workspace_prompt.as_ref(), - core_config::DEFAULT_WORKSPACE_PROMPT_SRC, - core_config::DEFAULT_WORKSPACE_PROMPT_DIST, - ), - ] - .into_iter() - .map(|(pair, default_source, default_dist)| { - let source_rel = pair - .and_then(|value| value.src.as_deref()) - .unwrap_or(default_source) - .replace('\\', "/"); - let translated_rel = pair - .and_then(|value| 
value.dist.as_deref()) - .unwrap_or(default_dist) - .replace('\\', "/"); - (source_rel, translated_rel) - }) - .collect() + let aindex = &config.aindex; + [ + ( + aindex.global_prompt.as_ref(), + core_config::DEFAULT_GLOBAL_PROMPT_SRC, + core_config::DEFAULT_GLOBAL_PROMPT_DIST, + ), + ( + aindex.workspace_prompt.as_ref(), + core_config::DEFAULT_WORKSPACE_PROMPT_SRC, + core_config::DEFAULT_WORKSPACE_PROMPT_DIST, + ), + ] + .into_iter() + .map(|(pair, default_source, default_dist)| { + let source_rel = pair + .and_then(|value| value.src.as_deref()) + .unwrap_or(default_source) + .replace('\\', "/"); + let translated_rel = pair + .and_then(|value| value.dist.as_deref()) + .unwrap_or(default_dist) + .replace('\\', "/"); + (source_rel, translated_rel) + }) + .collect() } fn collect_category_file_entries( - base: &std::path::Path, - config: &tnmsc::core::config::UserConfigFile, - category: &str, + base: &std::path::Path, + config: &tnmsc::core::config::UserConfigFile, + category: &str, ) -> Result, String> { - let paths = resolve_category_paths(config, category)?; - let dist_dir = base.join(&paths.translated_rel); - let src_dir = base.join(&paths.source_rel); - let mut entries = Vec::new(); - - if category == "app" { - collect_root_memory_prompt_files(base, config, &mut entries); - } - if src_dir.exists() { - collect_project_series_category_files( - &src_dir, - base, - &paths.translated_rel, - &dist_dir, - &mut entries, - ) - .map_err(|e| format!("Failed to scan {}: {e}", category))?; - } - - entries.sort_by(|a, b| a.source_path.cmp(&b.source_path)); - Ok(entries) + let paths = resolve_category_paths(config, category)?; + let dist_dir = base.join(&paths.translated_rel); + let src_dir = base.join(&paths.source_rel); + let mut entries = Vec::new(); + + if category == "app" { + collect_root_memory_prompt_files(base, config, &mut entries); + } + if src_dir.exists() { + collect_project_series_category_files( + &src_dir, + base, + &paths.translated_rel, + &dist_dir, + &mut 
entries, + ) + .map_err(|e| format!("Failed to scan {}: {e}", category))?; + } + + entries.sort_by(|a, b| a.source_path.cmp(&b.source_path)); + Ok(entries) } /// Read and resolve the merged tnmsc config for the current working directory. fn load_resolved_config(cwd: &str) -> Result { - let result = - tnmsc::load_config(Path::new(cwd)).map_err(|e| format!("Failed to load config: {e}"))?; - let config = result.config; - let workspace_dir = config.workspace_dir.as_deref().unwrap_or("."); - let workspace_dir = tnmsc::core::config::resolve_tilde(workspace_dir); - let aindex_dir = config - .aindex - .dir - .as_deref() - .unwrap_or(core_config::DEFAULT_AINDEX_DIR_NAME); - - Ok(ResolvedConfig { - aindex_root: workspace_dir.join(aindex_dir), - config, - }) + let result = + tnmsc::load_config(Path::new(cwd)).map_err(|e| format!("Failed to load config: {e}"))?; + let config = result.config; + let workspace_dir = config.workspace_dir.as_deref().unwrap_or("."); + let workspace_dir = tnmsc::core::config::resolve_tilde(workspace_dir); + let aindex_dir = config + .aindex + .dir + .as_deref() + .unwrap_or(core_config::DEFAULT_AINDEX_DIR_NAME); + + Ok(ResolvedConfig { + aindex_root: workspace_dir.join(aindex_dir), + config, + }) } /// Read the merged config and resolve the aindex root path. fn resolve_aindex_root(cwd: &str) -> Result { - let rc = load_resolved_config(cwd)?; - let path = rc.aindex_root; - if !path.exists() { - return Err(format!("Aindex directory not found: {}", path.display())); - } - Ok(path) + let rc = load_resolved_config(cwd)?; + let path = rc.aindex_root; + if !path.exists() { + return Err(format!("Aindex directory not found: {}", path.display())); + } + Ok(path) } /// Collect project-like source prompt files under `aindex/app/`, `aindex/ext/`, and `aindex/arch/`. 
#[tauri::command] pub fn list_aindex_files(cwd: String) -> Result, String> { - let ResolvedConfig { - aindex_root: base, - config, - } = load_resolved_config(&cwd)?; - let mut entries = Vec::new(); - collect_root_memory_prompt_files(&base, &config, &mut entries); - - for category in PROJECT_SERIES_CATEGORIES { - let paths = resolve_category_paths(&config, category)?; - let src_dir = base.join(&paths.source_rel); - if !src_dir.exists() { - continue; - } - - let dist_dir = base.join(&paths.translated_rel); - collect_project_series_category_files( - &src_dir, - &base, - &paths.translated_rel, - &dist_dir, - &mut entries, - ) - .map_err(|e| format!("Failed to scan aindex {category}: {e}"))?; + let ResolvedConfig { + aindex_root: base, + config, + } = load_resolved_config(&cwd)?; + let mut entries = Vec::new(); + collect_root_memory_prompt_files(&base, &config, &mut entries); + + for category in PROJECT_SERIES_CATEGORIES { + let paths = resolve_category_paths(&config, category)?; + let src_dir = base.join(&paths.source_rel); + if !src_dir.exists() { + continue; } - entries.sort_by(|a, b| a.source_path.cmp(&b.source_path)); - Ok(entries) + let dist_dir = base.join(&paths.translated_rel); + collect_project_series_category_files( + &src_dir, + &base, + &paths.translated_rel, + &dist_dir, + &mut entries, + ) + .map_err(|e| format!("Failed to scan aindex {category}: {e}"))?; + } + + entries.sort_by(|a, b| a.source_path.cmp(&b.source_path)); + Ok(entries) } /// Read a file relative to the aindex directory (resolved from config). 
#[tauri::command] pub fn read_aindex_file(cwd: String, rel_path: String) -> Result { - let base = resolve_aindex_root(&cwd)?; - let path = base.join(&rel_path); - if !path.exists() { - return Ok(String::new()); - } - std::fs::read_to_string(&path).map_err(|e| format!("Failed to read {}: {e}", path.display())) + let base = resolve_aindex_root(&cwd)?; + let path = base.join(&rel_path); + if !path.exists() { + return Ok(String::new()); + } + std::fs::read_to_string(&path).map_err(|e| format!("Failed to read {}: {e}", path.display())) } /// Write content to a file relative to the aindex directory (resolved from config). #[tauri::command] pub fn write_aindex_file(cwd: String, rel_path: String, content: String) -> Result<(), String> { - let base = resolve_aindex_root(&cwd)?; - let path = base.join(&rel_path); - if let Some(parent) = path.parent() { - std::fs::create_dir_all(parent) - .map_err(|e| format!("Failed to create dir {}: {e}", parent.display()))?; - } - std::fs::write(&path, &content).map_err(|e| format!("Failed to write {}: {e}", path.display())) + let base = resolve_aindex_root(&cwd)?; + let path = base.join(&rel_path); + if let Some(parent) = path.parent() { + std::fs::create_dir_all(parent) + .map_err(|e| format!("Failed to create dir {}: {e}", parent.display()))?; + } + std::fs::write(&path, &content).map_err(|e| format!("Failed to write {}: {e}", path.display())) } /// List source prompt files for a given category. /// Reads the corresponding `aindex` config field to resolve source and output directories. 
#[tauri::command] pub fn list_category_files(cwd: String, category: String) -> Result, String> { - let ResolvedConfig { - aindex_root: base, - config, - } = load_resolved_config(&cwd)?; - collect_category_file_entries(&base, &config, &category) + let ResolvedConfig { + aindex_root: base, + config, + } = load_resolved_config(&cwd)?; + collect_category_file_entries(&base, &config, &category) } fn collect_category_source_mdx( - dir: &std::path::Path, - src_root: &std::path::Path, - base: &std::path::Path, - translated_root_rel: &str, - dist_dir: &std::path::Path, - out: &mut Vec, + dir: &std::path::Path, + src_root: &std::path::Path, + base: &std::path::Path, + translated_root_rel: &str, + dist_dir: &std::path::Path, + out: &mut Vec, ) -> std::io::Result<()> { - for entry in std::fs::read_dir(dir)? { - let entry = entry?; - let path = entry.path(); - if path.is_dir() { - collect_category_source_mdx(&path, src_root, base, translated_root_rel, dist_dir, out)?; - } else if let Some(name) = path.file_name().and_then(|n| n.to_str()) { - let rel = path.strip_prefix(base).unwrap_or(&path); - let source_path = rel.to_string_lossy().replace('\\', "/"); - - if has_source_mdx_extension(name) { - // Source + translated pair - let rel_from_src = path.strip_prefix(src_root).unwrap_or(&path); - let rel_str = rel_from_src - .to_string_lossy() - .replace('\\', "/") - .to_string(); - let rel_str = replace_source_mdx_extension(&rel_str).unwrap_or(rel_str); - let translated_abs = dist_dir.join(&rel_str); - let translated_path = translated_abs - .strip_prefix(base) - .map(|p| p.to_string_lossy().replace('\\', "/")) - .unwrap_or_else(|_| { - format!("{}/{}", translated_root_rel.trim_end_matches('/'), rel_str) - }); - - out.push(AindexFileEntry { - source_path, - translated_path, - translated_exists: translated_abs.exists(), - file_type: SOURCE_MDX_FILE_TYPE.to_string(), - }); - } else if !name.ends_with(".mdx") { - // Resource file — single preview only - out.push(AindexFileEntry { - 
source_path, - translated_path: String::new(), - translated_exists: false, - file_type: "resource".to_string(), - }); - } - } + for entry in std::fs::read_dir(dir)? { + let entry = entry?; + let path = entry.path(); + if path.is_dir() { + collect_category_source_mdx(&path, src_root, base, translated_root_rel, dist_dir, out)?; + } else if let Some(name) = path.file_name().and_then(|n| n.to_str()) { + let rel = path.strip_prefix(base).unwrap_or(&path); + let source_path = rel.to_string_lossy().replace('\\', "/"); + + if has_source_mdx_extension(name) { + // Source + translated pair + let rel_from_src = path.strip_prefix(src_root).unwrap_or(&path); + let rel_str = rel_from_src + .to_string_lossy() + .replace('\\', "/") + .to_string(); + let rel_str = replace_source_mdx_extension(&rel_str).unwrap_or(rel_str); + let translated_abs = dist_dir.join(&rel_str); + let translated_path = translated_abs + .strip_prefix(base) + .map(|p| p.to_string_lossy().replace('\\', "/")) + .unwrap_or_else(|_| format!("{}/{}", translated_root_rel.trim_end_matches('/'), rel_str)); + + out.push(AindexFileEntry { + source_path, + translated_path, + translated_exists: translated_abs.exists(), + file_type: SOURCE_MDX_FILE_TYPE.to_string(), + }); + } else if !name.ends_with(".mdx") { + // Resource file — single preview only + out.push(AindexFileEntry { + source_path, + translated_path: String::new(), + translated_exists: false, + file_type: "resource".to_string(), + }); + } } - Ok(()) + } + Ok(()) } // --------------------------------------------------------------------------- @@ -699,511 +599,510 @@ fn collect_category_source_mdx( #[derive(Debug, Clone, Serialize, Deserialize, Default)] #[serde(rename_all = "camelCase")] pub struct CategoryStats { - pub name: String, - pub file_count: u32, - pub total_chars: u64, - pub total_lines: u64, - pub source_mdx_count: u32, - pub resource_count: u32, - pub translated_count: u32, + pub name: String, + pub file_count: u32, + pub total_chars: u64, + pub 
total_lines: u64, + pub source_mdx_count: u32, + pub resource_count: u32, + pub translated_count: u32, } /// Per-project statistics for project-like series (`app/`, `ext/`, `arch/`). #[derive(Debug, Clone, Serialize, Deserialize, Default)] #[serde(rename_all = "camelCase")] pub struct ProjectStats { - pub name: String, - pub file_count: u32, - pub total_chars: u64, - pub total_lines: u64, + pub name: String, + pub file_count: u32, + pub total_chars: u64, + pub total_lines: u64, } /// Overall aindex statistics. #[derive(Debug, Clone, Serialize, Deserialize, Default)] #[serde(rename_all = "camelCase")] pub struct AindexStats { - pub total_files: u32, - pub total_chars: u64, - pub total_lines: u64, - pub total_source_mdx: u32, - pub total_resources: u32, - pub total_translated: u32, - pub categories: Vec, - pub projects: Vec, - /// Extension distribution: [{ ext, count }] - pub extensions: Vec, + pub total_files: u32, + pub total_chars: u64, + pub total_lines: u64, + pub total_source_mdx: u32, + pub total_resources: u32, + pub total_translated: u32, + pub categories: Vec, + pub projects: Vec, + /// Extension distribution: [{ ext, count }] + pub extensions: Vec, } #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct ExtensionCount { - pub ext: String, - pub count: u32, + pub ext: String, + pub count: u32, } #[derive(Debug, Clone, Default)] struct StatAccumulator { - file_count: u32, - total_chars: u64, - total_lines: u64, - source_mdx_count: u32, - resource_count: u32, - translated_count: u32, - ext_map: std::collections::HashMap, + file_count: u32, + total_chars: u64, + total_lines: u64, + source_mdx_count: u32, + resource_count: u32, + translated_count: u32, + ext_map: std::collections::HashMap, } impl StatAccumulator { - fn add(&mut self, other: Self) { - self.file_count += other.file_count; - self.total_chars += other.total_chars; - self.total_lines += other.total_lines; - self.source_mdx_count += other.source_mdx_count; - 
self.resource_count += other.resource_count; - self.translated_count += other.translated_count; - for (key, value) in other.ext_map { - *self.ext_map.entry(key).or_default() += value; - } + fn add(&mut self, other: Self) { + self.file_count += other.file_count; + self.total_chars += other.total_chars; + self.total_lines += other.total_lines; + self.source_mdx_count += other.source_mdx_count; + self.resource_count += other.resource_count; + self.translated_count += other.translated_count; + for (key, value) in other.ext_map { + *self.ext_map.entry(key).or_default() += value; } + } - fn from_file(path: &std::path::Path) -> Self { - let mut stats = Self::default(); - if !path.is_file() { - return stats; - } - - stats.file_count = 1; - if let Ok(content) = std::fs::read_to_string(path) { - stats.total_chars = content.len() as u64; - stats.total_lines = content.lines().count() as u64; - } + fn from_file(path: &std::path::Path) -> Self { + let mut stats = Self::default(); + if !path.is_file() { + return stats; + } - let name = path.file_name().and_then(|n| n.to_str()).unwrap_or(""); - if has_source_mdx_extension(name) { - stats.source_mdx_count = 1; - stats.ext_map.insert("src.mdx".to_string(), 1); - } else { - let ext = name.rsplit('.').next().unwrap_or("other").to_lowercase(); - stats.ext_map.insert(ext, 1); - } + stats.file_count = 1; + if let Ok(content) = std::fs::read_to_string(path) { + stats.total_chars = content.len() as u64; + stats.total_lines = content.lines().count() as u64; + } - stats + let name = path.file_name().and_then(|n| n.to_str()).unwrap_or(""); + if has_source_mdx_extension(name) { + stats.source_mdx_count = 1; + stats.ext_map.insert("src.mdx".to_string(), 1); + } else { + let ext = name.rsplit('.').next().unwrap_or("other").to_lowercase(); + stats.ext_map.insert(ext, 1); } + + stats + } } /// Recursively count files and accumulate chars/lines. 
fn stat_dir(dir: &std::path::Path) -> StatAccumulator { - let mut stats = StatAccumulator::default(); - if let Ok(entries) = std::fs::read_dir(dir) { - for entry in entries.flatten() { - let path = entry.path(); - if path.is_dir() { - stats.add(stat_dir(&path)); - } else if path.is_file() { - stats.add(StatAccumulator::from_file(&path)); - } - } + let mut stats = StatAccumulator::default(); + if let Ok(entries) = std::fs::read_dir(dir) { + for entry in entries.flatten() { + let path = entry.path(); + if path.is_dir() { + stats.add(stat_dir(&path)); + } else if path.is_file() { + stats.add(StatAccumulator::from_file(&path)); + } } - stats + } + stats } fn derive_english_source_rel(source_rel: &str) -> Option { - replace_source_mdx_extension(source_rel).filter(|derived| derived != source_rel) + replace_source_mdx_extension(source_rel).filter(|derived| derived != source_rel) } fn collect_root_memory_prompt_stats( - base: &std::path::Path, - config: &tnmsc::core::config::UserConfigFile, + base: &std::path::Path, + config: &tnmsc::core::config::UserConfigFile, ) -> StatAccumulator { - let mut stats = StatAccumulator::default(); - let mut seen_paths = std::collections::HashSet::new(); - - for (source_rel, _) in collect_root_memory_prompt_pairs(config) { - for relative_path in std::iter::once(source_rel.clone()) - .chain(derive_english_source_rel(&source_rel).into_iter()) - { - if !seen_paths.insert(relative_path.clone()) { - continue; - } - - let absolute_path = base.join(&relative_path); - if absolute_path.exists() && absolute_path.is_file() { - stats.add(StatAccumulator::from_file(&absolute_path)); - } - } + let mut stats = StatAccumulator::default(); + let mut seen_paths = std::collections::HashSet::new(); + + for (source_rel, _) in collect_root_memory_prompt_pairs(config) { + for relative_path in + std::iter::once(source_rel.clone()).chain(derive_english_source_rel(&source_rel).into_iter()) + { + if !seen_paths.insert(relative_path.clone()) { + continue; + } + + let 
absolute_path = base.join(&relative_path); + if absolute_path.exists() && absolute_path.is_file() { + stats.add(StatAccumulator::from_file(&absolute_path)); + } } + } - stats + stats } fn accumulate_overall_stats( - summary: &StatAccumulator, - stats: &mut AindexStats, - all_ext: &mut std::collections::HashMap, + summary: &StatAccumulator, + stats: &mut AindexStats, + all_ext: &mut std::collections::HashMap, ) { - stats.total_files += summary.file_count; - stats.total_chars += summary.total_chars; - stats.total_lines += summary.total_lines; - stats.total_source_mdx += summary.source_mdx_count; - stats.total_resources += summary.resource_count; - for (key, value) in &summary.ext_map { - *all_ext.entry(key.clone()).or_default() += *value; - } + stats.total_files += summary.file_count; + stats.total_chars += summary.total_chars; + stats.total_lines += summary.total_lines; + stats.total_source_mdx += summary.source_mdx_count; + stats.total_resources += summary.resource_count; + for (key, value) in &summary.ext_map { + *all_ext.entry(key.clone()).or_default() += *value; + } } fn collect_project_series_stats( - base: &std::path::Path, - config: &tnmsc::core::config::UserConfigFile, - stats: &mut AindexStats, - all_ext: &mut std::collections::HashMap, + base: &std::path::Path, + config: &tnmsc::core::config::UserConfigFile, + stats: &mut AindexStats, + all_ext: &mut std::collections::HashMap, ) -> Result<(), String> { - for series_name in PROJECT_SERIES_CATEGORIES { - let category_paths = resolve_category_paths(config, series_name)?; - let src_dir = base.join(&category_paths.source_rel); - if !src_dir.exists() { - continue; - } + for series_name in PROJECT_SERIES_CATEGORIES { + let category_paths = resolve_category_paths(config, series_name)?; + let src_dir = base.join(&category_paths.source_rel); + if !src_dir.exists() { + continue; + } - if let Ok(entries) = std::fs::read_dir(&src_dir) { - for entry in entries.flatten() { - let path = entry.path(); - if path.is_dir() { 
- let project_name = path - .file_name() - .and_then(|n| n.to_str()) - .unwrap_or("") - .to_string(); - let label = if series_name == "app" { - project_name - } else { - format!("{series_name}/{project_name}") - }; - let project_stats = stat_dir(&path); - stats.projects.push(ProjectStats { - name: label, - file_count: project_stats.file_count, - total_chars: project_stats.total_chars, - total_lines: project_stats.total_lines, - }); - accumulate_overall_stats(&project_stats, stats, all_ext); - } - } + if let Ok(entries) = std::fs::read_dir(&src_dir) { + for entry in entries.flatten() { + let path = entry.path(); + if path.is_dir() { + let project_name = path + .file_name() + .and_then(|n| n.to_str()) + .unwrap_or("") + .to_string(); + let label = if series_name == "app" { + project_name + } else { + format!("{series_name}/{project_name}") + }; + let project_stats = stat_dir(&path); + stats.projects.push(ProjectStats { + name: label, + file_count: project_stats.file_count, + total_chars: project_stats.total_chars, + total_lines: project_stats.total_lines, + }); + accumulate_overall_stats(&project_stats, stats, all_ext); } + } } + } - Ok(()) + Ok(()) } fn build_aindex_stats( - base: &std::path::Path, - config: &tnmsc::core::config::UserConfigFile, + base: &std::path::Path, + config: &tnmsc::core::config::UserConfigFile, ) -> Result { - let mut stats = AindexStats::default(); - let mut all_ext: std::collections::HashMap = std::collections::HashMap::new(); - let root_prompt_stats = collect_root_memory_prompt_stats(base, config); - - accumulate_overall_stats(&root_prompt_stats, &mut stats, &mut all_ext); - collect_project_series_stats(base, config, &mut stats, &mut all_ext)?; - - // Root global/workspace prompts live outside the project-series directories, - // so the App category needs them merged back in explicitly. 
- for cat_name in &["app", "ext", "arch", "skills", "commands", "agents"] { - let category_paths = resolve_category_paths(config, cat_name)?; - let src_dir = base.join(&category_paths.source_rel); - let mut category_stats = if src_dir.exists() { - stat_dir(&src_dir) - } else { - StatAccumulator::default() - }; - if *cat_name == "app" { - category_stats.add(root_prompt_stats.clone()); - } - - stats.categories.push(CategoryStats { - name: cat_name.to_string(), - file_count: category_stats.file_count, - total_chars: category_stats.total_chars, - total_lines: category_stats.total_lines, - source_mdx_count: category_stats.source_mdx_count, - resource_count: category_stats.resource_count, - translated_count: category_stats.translated_count, - }); - - if !PROJECT_SERIES_CATEGORIES.contains(cat_name) { - accumulate_overall_stats(&category_stats, &mut stats, &mut all_ext); - } + let mut stats = AindexStats::default(); + let mut all_ext: std::collections::HashMap = std::collections::HashMap::new(); + let root_prompt_stats = collect_root_memory_prompt_stats(base, config); + + accumulate_overall_stats(&root_prompt_stats, &mut stats, &mut all_ext); + collect_project_series_stats(base, config, &mut stats, &mut all_ext)?; + + // Root global/workspace prompts live outside the project-series directories, + // so the App category needs them merged back in explicitly. 
+ for cat_name in &["app", "ext", "arch", "skills", "commands", "agents"] { + let category_paths = resolve_category_paths(config, cat_name)?; + let src_dir = base.join(&category_paths.source_rel); + let mut category_stats = if src_dir.exists() { + stat_dir(&src_dir) + } else { + StatAccumulator::default() + }; + if *cat_name == "app" { + category_stats.add(root_prompt_stats.clone()); } - let dist_dir = base.join("dist"); - if dist_dir.exists() { - stats.total_translated = stat_dir(&dist_dir).file_count; + stats.categories.push(CategoryStats { + name: cat_name.to_string(), + file_count: category_stats.file_count, + total_chars: category_stats.total_chars, + total_lines: category_stats.total_lines, + source_mdx_count: category_stats.source_mdx_count, + resource_count: category_stats.resource_count, + translated_count: category_stats.translated_count, + }); + + if !PROJECT_SERIES_CATEGORIES.contains(cat_name) { + accumulate_overall_stats(&category_stats, &mut stats, &mut all_ext); } + } - let mut ext_vec: Vec<_> = all_ext.into_iter().collect(); - ext_vec.sort_by(|a, b| b.1.cmp(&a.1)); - stats.extensions = ext_vec - .into_iter() - .map(|(ext, count)| ExtensionCount { ext, count }) - .collect(); + let dist_dir = base.join("dist"); + if dist_dir.exists() { + stats.total_translated = stat_dir(&dist_dir).file_count; + } - stats - .projects - .sort_by(|a, b| b.file_count.cmp(&a.file_count)); + let mut ext_vec: Vec<_> = all_ext.into_iter().collect(); + ext_vec.sort_by(|a, b| b.1.cmp(&a.1)); + stats.extensions = ext_vec + .into_iter() + .map(|(ext, count)| ExtensionCount { ext, count }) + .collect(); - Ok(stats) + stats + .projects + .sort_by(|a, b| b.file_count.cmp(&a.file_count)); + + Ok(stats) } /// Gather comprehensive statistics about the aindex project. 
#[tauri::command] pub fn get_aindex_stats(cwd: String) -> Result { - let ResolvedConfig { - aindex_root: base, - config, - } = load_resolved_config(&cwd)?; - build_aindex_stats(&base, &config) + let ResolvedConfig { + aindex_root: base, + config, + } = load_resolved_config(&cwd)?; + build_aindex_stats(&base, &config) } #[cfg(test)] mod tests { - use super::*; - use std::time::{SystemTime, UNIX_EPOCH}; - - fn create_temp_dir(prefix: &str) -> PathBuf { - let unique = SystemTime::now() - .duration_since(UNIX_EPOCH) - .expect("system time should be after unix epoch") - .as_nanos(); - let dir = std::env::temp_dir().join(format!("{prefix}-{unique}")); - std::fs::create_dir_all(&dir).expect("temp dir should be created"); - dir - } - - fn create_test_config() -> tnmsc::core::config::UserConfigFile { - tnmsc::core::config::UserConfigFile::default() - } + use super::*; + use std::time::{SystemTime, UNIX_EPOCH}; + + fn create_temp_dir(prefix: &str) -> PathBuf { + let unique = SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("system time should be after unix epoch") + .as_nanos(); + let dir = std::env::temp_dir().join(format!("{prefix}-{unique}")); + std::fs::create_dir_all(&dir).expect("temp dir should be created"); + dir + } + + fn create_test_config() -> tnmsc::core::config::UserConfigFile { + tnmsc::core::config::UserConfigFile::default() + } + + #[test] + fn resolve_category_paths_supports_project_series() { + let config = create_test_config(); + + let app = resolve_category_paths(&config, "app").expect("app paths should resolve"); + let ext = resolve_category_paths(&config, "ext").expect("ext paths should resolve"); + let arch = resolve_category_paths(&config, "arch").expect("arch paths should resolve"); + + assert_eq!(app.source_rel, "app"); + assert_eq!(app.translated_rel, "dist/app"); + assert_eq!(ext.source_rel, "ext"); + assert_eq!(ext.translated_rel, "dist/ext"); + assert_eq!(arch.source_rel, "arch"); + assert_eq!(arch.translated_rel, "dist/arch"); + } + + 
#[test] + fn collect_project_series_category_files_scans_app_ext_and_arch() { + let base = create_temp_dir("tnmsc-tauri-series-files"); + + let app_src = base.join("app").join("project-a"); + let ext_src = base.join("ext").join("plugin-a"); + let arch_src = base.join("arch").join("system-a"); + let app_dist = base.join("dist").join("app"); + let ext_dist = base.join("dist").join("ext"); + let arch_dist = base.join("dist").join("arch"); + + std::fs::create_dir_all(&app_src).expect("app dir should be created"); + std::fs::create_dir_all(&ext_src).expect("ext dir should be created"); + std::fs::create_dir_all(&arch_src).expect("arch dir should be created"); + std::fs::create_dir_all(app_dist.join("project-a")).expect("app dist dir should be created"); + std::fs::create_dir_all(ext_dist.join("plugin-a")).expect("ext dist dir should be created"); + std::fs::create_dir_all(arch_dist.join("system-a")).expect("arch dist dir should be created"); + + std::fs::write(app_src.join("agt.src.mdx"), "App").expect("app src file should exist"); + std::fs::write(ext_src.join("agt.src.mdx"), "Ext").expect("ext src file should exist"); + std::fs::write(arch_src.join("agt.src.mdx"), "Arch").expect("arch src file should exist"); + std::fs::write(app_dist.join("project-a").join("agt.mdx"), "App dist") + .expect("app dist file should exist"); + std::fs::write(ext_dist.join("plugin-a").join("agt.mdx"), "Ext dist") + .expect("ext dist file should exist"); + std::fs::write(arch_dist.join("system-a").join("agt.mdx"), "Arch dist") + .expect("arch dist file should exist"); - #[test] - fn resolve_category_paths_supports_project_series() { - let config = create_test_config(); - - let app = resolve_category_paths(&config, "app").expect("app paths should resolve"); - let ext = resolve_category_paths(&config, "ext").expect("ext paths should resolve"); - let arch = resolve_category_paths(&config, "arch").expect("arch paths should resolve"); - - assert_eq!(app.source_rel, "app"); - 
assert_eq!(app.translated_rel, "dist/app"); - assert_eq!(ext.source_rel, "ext"); - assert_eq!(ext.translated_rel, "dist/ext"); - assert_eq!(arch.source_rel, "arch"); - assert_eq!(arch.translated_rel, "dist/arch"); - } - - #[test] - fn collect_project_series_category_files_scans_app_ext_and_arch() { - let base = create_temp_dir("tnmsc-tauri-series-files"); - - let app_src = base.join("app").join("project-a"); - let ext_src = base.join("ext").join("plugin-a"); - let arch_src = base.join("arch").join("system-a"); - let app_dist = base.join("dist").join("app"); - let ext_dist = base.join("dist").join("ext"); - let arch_dist = base.join("dist").join("arch"); - - std::fs::create_dir_all(&app_src).expect("app dir should be created"); - std::fs::create_dir_all(&ext_src).expect("ext dir should be created"); - std::fs::create_dir_all(&arch_src).expect("arch dir should be created"); - std::fs::create_dir_all(app_dist.join("project-a")) - .expect("app dist dir should be created"); - std::fs::create_dir_all(ext_dist.join("plugin-a")).expect("ext dist dir should be created"); - std::fs::create_dir_all(arch_dist.join("system-a")) - .expect("arch dist dir should be created"); - - std::fs::write(app_src.join("agt.src.mdx"), "App").expect("app src file should exist"); - std::fs::write(ext_src.join("agt.src.mdx"), "Ext").expect("ext src file should exist"); - std::fs::write(arch_src.join("agt.src.mdx"), "Arch").expect("arch src file should exist"); - std::fs::write(app_dist.join("project-a").join("agt.mdx"), "App dist") - .expect("app dist file should exist"); - std::fs::write(ext_dist.join("plugin-a").join("agt.mdx"), "Ext dist") - .expect("ext dist file should exist"); - std::fs::write(arch_dist.join("system-a").join("agt.mdx"), "Arch dist") - .expect("arch dist file should exist"); - - let mut entries = Vec::new(); - collect_project_series_category_files( - &base.join("app"), - &base, - "dist/app", - &app_dist, - &mut entries, - ) - .expect("app series files should collect"); - 
collect_project_series_category_files( - &base.join("ext"), - &base, - "dist/ext", - &ext_dist, - &mut entries, - ) - .expect("ext series files should collect"); - collect_project_series_category_files( - &base.join("arch"), - &base, - "dist/arch", - &arch_dist, - &mut entries, - ) - .expect("arch series files should collect"); - - let source_paths: Vec<_> = entries - .iter() - .map(|entry| entry.source_path.as_str()) - .collect(); - assert!(source_paths.contains(&"app/project-a/agt.src.mdx")); - assert!(source_paths.contains(&"ext/plugin-a/agt.src.mdx")); - assert!(source_paths.contains(&"arch/system-a/agt.src.mdx")); - assert!(entries.iter().all(|entry| entry.translated_exists)); - - std::fs::remove_dir_all(base).expect("temp dir should be removed"); - } - - #[test] - fn collect_root_memory_prompt_files_includes_root_level_sources() { - let base = create_temp_dir("tnmsc-tauri-root-prompts"); - let config = create_test_config(); - std::fs::create_dir_all(base.join("dist")).expect("dist dir should be created"); - std::fs::write(base.join("global.src.mdx"), "Global") - .expect("global source prompt should be created"); - std::fs::write(base.join("workspace.src.mdx"), "Workspace") - .expect("workspace source prompt should be created"); - std::fs::write(base.join("dist").join("global.mdx"), "Global dist") - .expect("global dist prompt should be created"); - - let mut entries = Vec::new(); - collect_root_memory_prompt_files(&base, &config, &mut entries); - entries.sort_by(|a, b| a.source_path.cmp(&b.source_path)); - - assert_eq!(entries.len(), 2); - assert_eq!(entries[0].source_path, "global.src.mdx"); - assert_eq!(entries[0].translated_path, "dist/global.mdx"); - assert!(entries[0].translated_exists); - assert_eq!(entries[1].source_path, "workspace.src.mdx"); - assert_eq!(entries[1].translated_path, "dist/workspace.mdx"); - assert!(!entries[1].translated_exists); - - std::fs::remove_dir_all(base).expect("temp dir should be removed"); - } + let mut entries = 
Vec::new(); + collect_project_series_category_files( + &base.join("app"), + &base, + "dist/app", + &app_dist, + &mut entries, + ) + .expect("app series files should collect"); + collect_project_series_category_files( + &base.join("ext"), + &base, + "dist/ext", + &ext_dist, + &mut entries, + ) + .expect("ext series files should collect"); + collect_project_series_category_files( + &base.join("arch"), + &base, + "dist/arch", + &arch_dist, + &mut entries, + ) + .expect("arch series files should collect"); + + let source_paths: Vec<_> = entries + .iter() + .map(|entry| entry.source_path.as_str()) + .collect(); + assert!(source_paths.contains(&"app/project-a/agt.src.mdx")); + assert!(source_paths.contains(&"ext/plugin-a/agt.src.mdx")); + assert!(source_paths.contains(&"arch/system-a/agt.src.mdx")); + assert!(entries.iter().all(|entry| entry.translated_exists)); + + std::fs::remove_dir_all(base).expect("temp dir should be removed"); + } + + #[test] + fn collect_root_memory_prompt_files_includes_root_level_sources() { + let base = create_temp_dir("tnmsc-tauri-root-prompts"); + let config = create_test_config(); + std::fs::create_dir_all(base.join("dist")).expect("dist dir should be created"); + std::fs::write(base.join("global.src.mdx"), "Global") + .expect("global source prompt should be created"); + std::fs::write(base.join("workspace.src.mdx"), "Workspace") + .expect("workspace source prompt should be created"); + std::fs::write(base.join("dist").join("global.mdx"), "Global dist") + .expect("global dist prompt should be created"); - #[test] - fn collect_category_file_entries_keeps_root_prompts_without_app_directory() { - let base = create_temp_dir("tnmsc-tauri-root-only-files"); - let config = create_test_config(); - std::fs::create_dir_all(base.join("dist")).expect("dist dir should be created"); - std::fs::write(base.join("global.src.mdx"), "Global") - .expect("global source prompt should be created"); - std::fs::write(base.join("workspace.src.mdx"), "Workspace") - 
.expect("workspace source prompt should be created"); - - let entries = - collect_category_file_entries(&base, &config, "app").expect("app files should collect"); - let source_paths: Vec<_> = entries - .iter() - .map(|entry| entry.source_path.as_str()) - .collect(); - - assert_eq!(entries.len(), 2); - assert!(source_paths.contains(&"global.src.mdx")); - assert!(source_paths.contains(&"workspace.src.mdx")); - - std::fs::remove_dir_all(base).expect("temp dir should be removed"); - } + let mut entries = Vec::new(); + collect_root_memory_prompt_files(&base, &config, &mut entries); + entries.sort_by(|a, b| a.source_path.cmp(&b.source_path)); - #[test] - fn collect_project_series_stats_includes_ext_and_arch_projects() { - let base = create_temp_dir("tnmsc-tauri-series-stats"); - let config = create_test_config(); - std::fs::create_dir_all(base.join("app").join("project-a")) - .expect("app project dir should be created"); - std::fs::create_dir_all(base.join("ext").join("plugin-a")) - .expect("ext project dir should be created"); - std::fs::create_dir_all(base.join("arch").join("system-a")) - .expect("arch project dir should be created"); - std::fs::write( - base.join("app").join("project-a").join("agt.src.mdx"), - "App", - ) - .expect("app project file should be created"); - std::fs::write(base.join("ext").join("plugin-a").join("agt.src.mdx"), "Ext") - .expect("ext project file should be created"); - std::fs::write( - base.join("arch").join("system-a").join("agt.src.mdx"), - "Arch", - ) - .expect("arch project file should be created"); - - let mut stats = AindexStats::default(); - let mut all_ext = std::collections::HashMap::new(); - collect_project_series_stats(&base, &config, &mut stats, &mut all_ext) - .expect("project stats should collect"); - - let names: Vec<_> = stats - .projects - .iter() - .map(|project| project.name.as_str()) - .collect(); - assert!(names.contains(&"project-a")); - assert!(names.contains(&"ext/plugin-a")); - 
assert!(names.contains(&"arch/system-a")); - - std::fs::remove_dir_all(base).expect("temp dir should be removed"); - } + assert_eq!(entries.len(), 2); + assert_eq!(entries[0].source_path, "global.src.mdx"); + assert_eq!(entries[0].translated_path, "dist/global.mdx"); + assert!(entries[0].translated_exists); + assert_eq!(entries[1].source_path, "workspace.src.mdx"); + assert_eq!(entries[1].translated_path, "dist/workspace.mdx"); + assert!(!entries[1].translated_exists); + + std::fs::remove_dir_all(base).expect("temp dir should be removed"); + } + + #[test] + fn collect_category_file_entries_keeps_root_prompts_without_app_directory() { + let base = create_temp_dir("tnmsc-tauri-root-only-files"); + let config = create_test_config(); + std::fs::create_dir_all(base.join("dist")).expect("dist dir should be created"); + std::fs::write(base.join("global.src.mdx"), "Global") + .expect("global source prompt should be created"); + std::fs::write(base.join("workspace.src.mdx"), "Workspace") + .expect("workspace source prompt should be created"); + + let entries = + collect_category_file_entries(&base, &config, "app").expect("app files should collect"); + let source_paths: Vec<_> = entries + .iter() + .map(|entry| entry.source_path.as_str()) + .collect(); + + assert_eq!(entries.len(), 2); + assert!(source_paths.contains(&"global.src.mdx")); + assert!(source_paths.contains(&"workspace.src.mdx")); + + std::fs::remove_dir_all(base).expect("temp dir should be removed"); + } + + #[test] + fn collect_project_series_stats_includes_ext_and_arch_projects() { + let base = create_temp_dir("tnmsc-tauri-series-stats"); + let config = create_test_config(); + std::fs::create_dir_all(base.join("app").join("project-a")) + .expect("app project dir should be created"); + std::fs::create_dir_all(base.join("ext").join("plugin-a")) + .expect("ext project dir should be created"); + std::fs::create_dir_all(base.join("arch").join("system-a")) + .expect("arch project dir should be created"); + 
std::fs::write( + base.join("app").join("project-a").join("agt.src.mdx"), + "App", + ) + .expect("app project file should be created"); + std::fs::write(base.join("ext").join("plugin-a").join("agt.src.mdx"), "Ext") + .expect("ext project file should be created"); + std::fs::write( + base.join("arch").join("system-a").join("agt.src.mdx"), + "Arch", + ) + .expect("arch project file should be created"); - #[test] - fn build_aindex_stats_counts_root_memory_prompts() { - let base = create_temp_dir("tnmsc-tauri-root-stats"); - let config = create_test_config(); - std::fs::create_dir_all(base.join("app").join("project-a")) - .expect("app project dir should be created"); - std::fs::create_dir_all(base.join("dist").join("app").join("project-a")) - .expect("app dist dir should be created"); - std::fs::create_dir_all(base.join("dist")).expect("dist dir should be created"); - std::fs::write(base.join("global.src.mdx"), "Global zh") - .expect("global source prompt should be created"); - std::fs::write(base.join("global.mdx"), "Global en") - .expect("global english source should be created"); - std::fs::write(base.join("workspace.src.mdx"), "Workspace zh") - .expect("workspace source prompt should be created"); - std::fs::write(base.join("workspace.mdx"), "Workspace en") - .expect("workspace english source should be created"); - std::fs::write( - base.join("app").join("project-a").join("agt.src.mdx"), - "App project zh", - ) - .expect("app project source should be created"); - std::fs::write(base.join("dist").join("global.mdx"), "Global dist") - .expect("global dist should be created"); - std::fs::write(base.join("dist").join("workspace.mdx"), "Workspace dist") - .expect("workspace dist should be created"); - std::fs::write( - base.join("dist") - .join("app") - .join("project-a") - .join("agt.mdx"), - "App project dist", - ) - .expect("app project dist should be created"); - - let stats = build_aindex_stats(&base, &config).expect("stats should build"); - let app_category = stats 
- .categories - .iter() - .find(|category| category.name == "app") - .expect("app category should exist"); - - assert_eq!(stats.total_files, 5); - assert_eq!(stats.total_source_mdx, 3); - assert_eq!(stats.total_translated, 3); - assert_eq!(app_category.file_count, 5); - assert_eq!(app_category.source_mdx_count, 3); - - std::fs::remove_dir_all(base).expect("temp dir should be removed"); - } + let mut stats = AindexStats::default(); + let mut all_ext = std::collections::HashMap::new(); + collect_project_series_stats(&base, &config, &mut stats, &mut all_ext) + .expect("project stats should collect"); + + let names: Vec<_> = stats + .projects + .iter() + .map(|project| project.name.as_str()) + .collect(); + assert!(names.contains(&"project-a")); + assert!(names.contains(&"ext/plugin-a")); + assert!(names.contains(&"arch/system-a")); + + std::fs::remove_dir_all(base).expect("temp dir should be removed"); + } + + #[test] + fn build_aindex_stats_counts_root_memory_prompts() { + let base = create_temp_dir("tnmsc-tauri-root-stats"); + let config = create_test_config(); + std::fs::create_dir_all(base.join("app").join("project-a")) + .expect("app project dir should be created"); + std::fs::create_dir_all(base.join("dist").join("app").join("project-a")) + .expect("app dist dir should be created"); + std::fs::create_dir_all(base.join("dist")).expect("dist dir should be created"); + std::fs::write(base.join("global.src.mdx"), "Global zh") + .expect("global source prompt should be created"); + std::fs::write(base.join("global.mdx"), "Global en") + .expect("global english source should be created"); + std::fs::write(base.join("workspace.src.mdx"), "Workspace zh") + .expect("workspace source prompt should be created"); + std::fs::write(base.join("workspace.mdx"), "Workspace en") + .expect("workspace english source should be created"); + std::fs::write( + base.join("app").join("project-a").join("agt.src.mdx"), + "App project zh", + ) + .expect("app project source should be 
created"); + std::fs::write(base.join("dist").join("global.mdx"), "Global dist") + .expect("global dist should be created"); + std::fs::write(base.join("dist").join("workspace.mdx"), "Workspace dist") + .expect("workspace dist should be created"); + std::fs::write( + base + .join("dist") + .join("app") + .join("project-a") + .join("agt.mdx"), + "App project dist", + ) + .expect("app project dist should be created"); + + let stats = build_aindex_stats(&base, &config).expect("stats should build"); + let app_category = stats + .categories + .iter() + .find(|category| category.name == "app") + .expect("app category should exist"); + + assert_eq!(stats.total_files, 5); + assert_eq!(stats.total_source_mdx, 3); + assert_eq!(stats.total_translated, 3); + assert_eq!(app_category.file_count, 5); + assert_eq!(app_category.source_mdx_count, 3); + + std::fs::remove_dir_all(base).expect("temp dir should be removed"); + } } diff --git a/gui/src-tauri/src/lib.rs b/gui/src-tauri/src/lib.rs index b66648cc..0d9190ae 100644 --- a/gui/src-tauri/src/lib.rs +++ b/gui/src-tauri/src/lib.rs @@ -6,38 +6,38 @@ use tauri::Manager; #[cfg_attr(mobile, tauri::mobile_entry_point)] pub fn run() { - tauri::Builder::default() - .plugin(tauri_plugin_shell::init()) - .plugin(tauri_plugin_updater::Builder::new().build()) - .invoke_handler(tauri::generate_handler![ - commands::execute_pipeline, - commands::load_config, - commands::list_plugins, - commands::clean_outputs, - commands::get_logs, - commands::read_config_file, - commands::write_config_file, - commands::open_config_dir, - commands::list_aindex_files, - commands::read_aindex_file, - commands::write_aindex_file, - commands::list_category_files, - commands::get_aindex_stats, - ]) - .setup(|app| { - tray::create_tray(app)?; + tauri::Builder::default() + .plugin(tauri_plugin_shell::init()) + .plugin(tauri_plugin_updater::Builder::new().build()) + .invoke_handler(tauri::generate_handler![ + commands::install_pipeline, + commands::load_config, + 
commands::list_plugins, + commands::clean_outputs, + commands::get_logs, + commands::read_config_file, + commands::write_config_file, + commands::open_config_dir, + commands::list_aindex_files, + commands::read_aindex_file, + commands::write_aindex_file, + commands::list_category_files, + commands::get_aindex_stats, + ]) + .setup(|app| { + tray::create_tray(app)?; - let window = app.get_webview_window("main").unwrap(); - let window_clone = window.clone(); - window.on_window_event(move |event| { - if let tauri::WindowEvent::CloseRequested { api, .. } = event { - api.prevent_close(); - let _ = window_clone.hide(); - } - }); + let window = app.get_webview_window("main").unwrap(); + let window_clone = window.clone(); + window.on_window_event(move |event| { + if let tauri::WindowEvent::CloseRequested { api, .. } = event { + api.prevent_close(); + let _ = window_clone.hide(); + } + }); - Ok(()) - }) - .run(tauri::generate_context!()) - .expect("error while running tauri application"); + Ok(()) + }) + .run(tauri::generate_context!()) + .expect("error while running tauri application"); } diff --git a/gui/src-tauri/src/main.rs b/gui/src-tauri/src/main.rs index 69c3a72e..ad5fe839 100644 --- a/gui/src-tauri/src/main.rs +++ b/gui/src-tauri/src/main.rs @@ -2,5 +2,5 @@ #![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] fn main() { - app_lib::run(); + app_lib::run(); } diff --git a/gui/src-tauri/src/tray.rs b/gui/src-tauri/src/tray.rs index a2f9031a..c729bbd9 100644 --- a/gui/src-tauri/src/tray.rs +++ b/gui/src-tauri/src/tray.rs @@ -1,7 +1,7 @@ /// System tray integration for the Memory Sync desktop application. /// /// Creates a tray icon with a context menu containing three actions: -/// - **执行同步** (`execute`): Triggers pipeline execution. Currently shows +/// - **Install** (`install`): Triggers pipeline execution. Currently shows /// the main window as a placeholder until full sidecar integration is wired. 
/// - **打开主窗口** (`show`): Brings the main window to the foreground. /// - **退出** (`quit`): Fully exits the application process. @@ -13,13 +13,13 @@ /// /// - 8.1 — Display tray icon on startup /// - 8.2 — Click tray icon toggles window show/hide -/// - 8.3 — Right-click context menu with execute, show, quit -/// - 8.4 — "执行同步" triggers pipeline execution +/// - 8.3 — Right-click context menu with install, show, quit +/// - 8.4 — "Install" triggers pipeline execution /// - 8.6 — "退出" fully exits the application use tauri::{ - Manager, - menu::{Menu, MenuItem}, - tray::{TrayIcon, TrayIconBuilder, TrayIconEvent}, + Manager, + menu::{Menu, MenuItem}, + tray::{TrayIcon, TrayIconBuilder, TrayIconEvent}, }; /// Create and configure the system tray icon with its context menu. @@ -32,55 +32,55 @@ use tauri::{ /// Returns a [`tauri::Error`] if menu item creation, menu assembly, icon /// retrieval, or tray builder registration fails. pub fn create_tray(app: &tauri::App) -> Result { - // ── Context menu items ────────────────────────────────────────────── - let execute_item = MenuItem::with_id(app, "execute", "执行同步", true, None::<&str>)?; - let show_item = MenuItem::with_id(app, "show", "打开主窗口", true, None::<&str>)?; - let quit_item = MenuItem::with_id(app, "quit", "退出", true, None::<&str>)?; + // ── Context menu items ────────────────────────────────────────────── + let install_item = MenuItem::with_id(app, "install", "Install", true, None::<&str>)?; + let show_item = MenuItem::with_id(app, "show", "打开主窗口", true, None::<&str>)?; + let quit_item = MenuItem::with_id(app, "quit", "退出", true, None::<&str>)?; - let menu = Menu::with_items(app, &[&execute_item, &show_item, &quit_item])?; + let menu = Menu::with_items(app, &[&install_item, &show_item, &quit_item])?; - // ── Build the tray icon ───────────────────────────────────────────── - TrayIconBuilder::new() - .icon(app.default_window_icon().unwrap().clone()) - .menu(&menu) - // Handle context-menu item clicks. 
- .on_menu_event(|app, event| { - match event.id.as_ref() { - "execute" => { - // TODO: Trigger pipeline execution via sidecar once the - // full IPC wiring is in place. For now, surface the - // main window so the user can initiate execution - // from the GUI. - if let Some(window) = app.get_webview_window("main") { - let _ = window.show(); - let _ = window.set_focus(); - } - } - "show" => { - if let Some(window) = app.get_webview_window("main") { - let _ = window.show(); - let _ = window.set_focus(); - } - } - "quit" => { - app.exit(0); - } - _ => {} - } - }) - // Left-click on the tray icon toggles window visibility. - .on_tray_icon_event(|tray, event| { - if let TrayIconEvent::Click { .. } = event { - let app = tray.app_handle(); - if let Some(window) = app.get_webview_window("main") { - if window.is_visible().unwrap_or(false) { - let _ = window.hide(); - } else { - let _ = window.show(); - let _ = window.set_focus(); - } - } - } - }) - .build(app) + // ── Build the tray icon ───────────────────────────────────────────── + TrayIconBuilder::new() + .icon(app.default_window_icon().unwrap().clone()) + .menu(&menu) + // Handle context-menu item clicks. + .on_menu_event(|app, event| { + match event.id.as_ref() { + "install" => { + // TODO: Trigger pipeline execution via sidecar once the + // full IPC wiring is in place. For now, surface the + // main window so the user can initiate installation + // from the GUI. + if let Some(window) = app.get_webview_window("main") { + let _ = window.show(); + let _ = window.set_focus(); + } + } + "show" => { + if let Some(window) = app.get_webview_window("main") { + let _ = window.show(); + let _ = window.set_focus(); + } + } + "quit" => { + app.exit(0); + } + _ => {} + } + }) + // Left-click on the tray icon toggles window visibility. + .on_tray_icon_event(|tray, event| { + if let TrayIconEvent::Click { .. 
} = event { + let app = tray.app_handle(); + if let Some(window) = app.get_webview_window("main") { + if window.is_visible().unwrap_or(false) { + let _ = window.hide(); + } else { + let _ = window.show(); + let _ = window.set_focus(); + } + } + } + }) + .build(app) } diff --git a/gui/src-tauri/tauri.conf.json b/gui/src-tauri/tauri.conf.json index 7e9a3ab8..1900e723 100644 --- a/gui/src-tauri/tauri.conf.json +++ b/gui/src-tauri/tauri.conf.json @@ -1,6 +1,6 @@ { "$schema": "https://schema.tauri.app/config/2", - "version": "2026.10408.12323", + "version": "2026.10411.10132", "productName": "Memory Sync", "identifier": "org.truenine.memory-sync", "build": { diff --git a/gui/src-tauri/tests/ipc_contract_property.rs b/gui/src-tauri/tests/ipc_contract_property.rs index 34312d91..55e0b97a 100644 --- a/gui/src-tauri/tests/ipc_contract_property.rs +++ b/gui/src-tauri/tests/ipc_contract_property.rs @@ -17,51 +17,51 @@ use serde_json::Value; // --------------------------------------------------------------------------- fn arb_plugin_execution_result() -> impl Strategy { - (any::(), any::(), any::(), any::()).prop_map( - |(plugin, files, dirs, dry_run)| PluginExecutionResult { - plugin, - files, - dirs, - dry_run, - }, - ) + (any::(), any::(), any::(), any::()).prop_map( + |(plugin, files, dirs, dry_run)| PluginExecutionResult { + plugin, + files, + dirs, + dry_run, + }, + ) } fn arb_log_entry() -> impl Strategy { - (any::(), any::(), any::()).prop_map(|(timestamp, level, logger)| { - LogEntry { - timestamp, - level, - logger, - payload: serde_json::Value::Null, - } + ( + prop::sample::select(vec!["stdout".to_string(), "stderr".to_string()]), + prop::option::of(any::()), + any::(), + ) + .prop_map(|(stream, source, markdown)| LogEntry { + stream, + source, + markdown, }) } fn arb_pipeline_result() -> impl Strategy { - ( - any::(), - any::(), - any::(), - any::(), - prop::collection::vec(arb_plugin_execution_result(), 0..5), - prop::collection::vec(arb_log_entry(), 0..5), - 
prop::collection::vec(any::(), 0..5), + ( + any::(), + any::(), + any::(), + any::(), + prop::collection::vec(arb_plugin_execution_result(), 0..5), + prop::collection::vec(arb_log_entry(), 0..5), + prop::collection::vec(any::(), 0..5), + ) + .prop_map( + |(success, total_files, total_dirs, dry_run, plugin_results, logs, errors)| PipelineResult { + success, + total_files, + total_dirs, + dry_run, + command: None, + plugin_results, + logs, + errors, + }, ) - .prop_map( - |(success, total_files, total_dirs, dry_run, plugin_results, logs, errors)| { - PipelineResult { - success, - total_files, - total_dirs, - dry_run, - command: None, - plugin_results, - logs, - errors, - } - }, - ) } // --------------------------------------------------------------------------- @@ -188,16 +188,11 @@ proptest! { .expect("serialised JSON must be valid"); let obj = val.as_object().expect("LogEntry JSON must be an object"); - prop_assert!(obj.contains_key("timestamp"), "JSON must contain 'timestamp'"); - prop_assert!(obj["timestamp"].is_string(), "'timestamp' must be a string"); - - prop_assert!(obj.contains_key("level"), "JSON must contain 'level'"); - prop_assert!(obj["level"].is_string(), "'level' must be a string"); - - prop_assert!(obj.contains_key("logger"), "JSON must contain 'logger'"); - prop_assert!(obj["logger"].is_string(), "'logger' must be a string"); + prop_assert!(obj.contains_key("stream"), "JSON must contain 'stream'"); + prop_assert!(obj["stream"].is_string(), "'stream' must be a string"); - prop_assert!(obj.contains_key("payload"), "JSON must contain 'payload'"); + prop_assert!(obj.contains_key("markdown"), "JSON must contain 'markdown'"); + prop_assert!(obj["markdown"].is_string(), "'markdown' must be a string"); } /// Round-trip: deserialise(serialise(LogEntry)) == original. @@ -210,8 +205,8 @@ proptest! 
{ let restored: LogEntry = serde_json::from_str(&json) .expect("LogEntry must deserialise from its own JSON"); - prop_assert_eq!(entry.timestamp, restored.timestamp); - prop_assert_eq!(entry.level, restored.level); - prop_assert_eq!(entry.logger, restored.logger); + prop_assert_eq!(entry.stream, restored.stream); + prop_assert_eq!(entry.source, restored.source); + prop_assert_eq!(entry.markdown, restored.markdown); } } diff --git a/gui/src-tauri/tests/sidecar_removed_test.rs b/gui/src-tauri/tests/sidecar_removed_test.rs index 2ae6733c..dcd4f78c 100644 --- a/gui/src-tauri/tests/sidecar_removed_test.rs +++ b/gui/src-tauri/tests/sidecar_removed_test.rs @@ -9,45 +9,45 @@ const COMMANDS_SRC: &str = include_str!("../src/commands.rs"); /// Helper: assert a `fn ` definition is absent from the source. fn assert_fn_absent(source: &str, fn_name: &str) { - // Match both `fn name(` and `fn name<` to catch generic variants. - let pattern_paren = format!("fn {}(", fn_name); - let pattern_angle = format!("fn {}<", fn_name); - assert!( - !source.contains(&pattern_paren) && !source.contains(&pattern_angle), - "Sidecar function `{fn_name}` should have been removed from commands.rs but was found" - ); + // Match both `fn name(` and `fn name<` to catch generic variants. + let pattern_paren = format!("fn {}(", fn_name); + let pattern_angle = format!("fn {}<", fn_name); + assert!( + !source.contains(&pattern_paren) && !source.contains(&pattern_angle), + "Sidecar function `{fn_name}` should have been removed from commands.rs but was found" + ); } /// Requirement 3.1 — `resolve_cli_path` and its helpers must be removed. #[test] fn test_resolve_cli_path_removed() { - assert_fn_absent(COMMANDS_SRC, "resolve_cli_path"); + assert_fn_absent(COMMANDS_SRC, "resolve_cli_path"); } /// Requirement 3.2 — `run_cli` (subprocess invocation) must be removed. 
#[test] fn test_run_cli_removed() { - assert_fn_absent(COMMANDS_SRC, "run_cli"); + assert_fn_absent(COMMANDS_SRC, "run_cli"); } /// Requirement 3.3 — `check_cli` Tauri command must be removed. #[test] fn test_check_cli_removed() { - assert_fn_absent(COMMANDS_SRC, "check_cli"); + assert_fn_absent(COMMANDS_SRC, "check_cli"); } /// Requirement 3.4 — stdout log-parsing helpers must be removed. #[test] fn test_strip_ansi_removed() { - assert_fn_absent(COMMANDS_SRC, "strip_ansi"); + assert_fn_absent(COMMANDS_SRC, "strip_ansi"); } #[test] fn test_parse_log_line_removed() { - assert_fn_absent(COMMANDS_SRC, "parse_log_line"); + assert_fn_absent(COMMANDS_SRC, "parse_log_line"); } #[test] fn test_parse_all_logs_removed() { - assert_fn_absent(COMMANDS_SRC, "parse_all_logs"); + assert_fn_absent(COMMANDS_SRC, "parse_all_logs"); } diff --git a/gui/src/api/bridge.property.test.ts b/gui/src/api/bridge.property.test.ts index 639829ba..7181da12 100644 --- a/gui/src/api/bridge.property.test.ts +++ b/gui/src/api/bridge.property.test.ts @@ -20,15 +20,10 @@ const arbPluginExecutionResult: fc.Arbitrary = fc.record( dryRun: fc.boolean(), }) -// Use integer ms in a safe range to avoid Invalid Date during shrinking -const MIN_TS = new Date('2000-01-01T00:00:00.000Z').getTime() -const MAX_TS = new Date('2099-12-31T23:59:59.999Z').getTime() - const arbLogEntry: fc.Arbitrary = fc.record({ - timestamp: fc.integer({ min: MIN_TS, max: MAX_TS }).map((ms) => new Date(ms).toISOString()), - level: fc.constantFrom('info', 'warn', 'error', 'debug', 'verbose'), - logger: fc.string({ minLength: 1, maxLength: 64 }), - payload: fc.oneof(fc.string(), fc.integer(), fc.boolean(), fc.constant(null)), + stream: fc.constantFrom('stdout', 'stderr'), + source: fc.option(fc.string({ minLength: 1, maxLength: 64 }), { nil: undefined }), + markdown: fc.string({ minLength: 0, maxLength: 400 }), }) const arbPipelineResult: fc.Arbitrary = fc @@ -122,10 +117,8 @@ describe('LogEntry interface field integrity', () => { 
fc.property(arbLogEntry, (entry) => { const parsed = JSON.parse(JSON.stringify(entry)) as Record - expect(typeof parsed['timestamp']).toBe('string') - expect(typeof parsed['level']).toBe('string') - expect(typeof parsed['logger']).toBe('string') - expect('payload' in parsed).toBe(true) + expect(typeof parsed['stream']).toBe('string') + expect(typeof parsed['markdown']).toBe('string') }), { numRuns: 200 }, ) @@ -136,10 +129,9 @@ describe('LogEntry interface field integrity', () => { fc.property(arbLogEntry, (entry) => { const roundTripped = JSON.parse(JSON.stringify(entry)) as LogEntry - expect(roundTripped.timestamp).toBe(entry.timestamp) - expect(roundTripped.level).toBe(entry.level) - expect(roundTripped.logger).toBe(entry.logger) - expect(roundTripped.payload).toStrictEqual(entry.payload) + expect(roundTripped.stream).toBe(entry.stream) + expect(roundTripped.source).toBe(entry.source) + expect(roundTripped.markdown).toStrictEqual(entry.markdown) }), { numRuns: 200 }, ) @@ -171,10 +163,8 @@ describe('PipelineResult nested structure integrity', () => { for (const log of parsed.logs) { const l = log as unknown as Record - expect(typeof l['timestamp']).toBe('string') - expect(typeof l['level']).toBe('string') - expect(typeof l['logger']).toBe('string') - expect('payload' in l).toBe(true) + expect(typeof l['stream']).toBe('string') + expect(typeof l['markdown']).toBe('string') } }), { numRuns: 200 }, diff --git a/gui/src/api/bridge.test.ts b/gui/src/api/bridge.test.ts index cb13a0b1..1260d3ae 100644 --- a/gui/src/api/bridge.test.ts +++ b/gui/src/api/bridge.test.ts @@ -10,11 +10,11 @@ import { invoke } from '@tauri-apps/api/core' import { cleanOutputs, - executePipeline, getAindexStats, + installPipeline, listAindexFiles, listCategoryFiles, - listPlugins, + listAdaptors, loadConfig, } from '@/api/bridge' @@ -28,7 +28,7 @@ afterEach(() => { vi.restoreAllMocks() }) -describe('executePipeline', () => { +describe('installPipeline', () => { const mockResult: PipelineResult 
= { success: true, totalFiles: 5, @@ -46,13 +46,13 @@ describe('executePipeline', () => { errors: [], } - it('should invoke execute_pipeline with cwd and dryRun', async () => { + it('should invoke install_pipeline with cwd and dryRun', async () => { mockedInvoke.mockResolvedValue(mockResult) - const result = await executePipeline('/home/user/project', true) + const result = await installPipeline('/home/user/project', true) expect(mockedInvoke).toHaveBeenCalledOnce() - expect(mockedInvoke).toHaveBeenCalledWith('execute_pipeline', { + expect(mockedInvoke).toHaveBeenCalledWith('install_pipeline', { cwd: '/home/user/project', dryRun: true, }) @@ -62,9 +62,9 @@ describe('executePipeline', () => { it('should default dryRun to false', async () => { mockedInvoke.mockResolvedValue(mockResult) - await executePipeline('/workspace') + await installPipeline('/workspace') - expect(mockedInvoke).toHaveBeenCalledWith('execute_pipeline', { + expect(mockedInvoke).toHaveBeenCalledWith('install_pipeline', { cwd: '/workspace', dryRun: false, }) @@ -73,7 +73,7 @@ describe('executePipeline', () => { it('should propagate invoke rejection', async () => { mockedInvoke.mockRejectedValue(new Error('sidecar not found')) - await expect(executePipeline('/bad/path')).rejects.toThrow('sidecar not found') + await expect(installPipeline('/bad/path')).rejects.toThrow('sidecar not found') }) }) @@ -150,7 +150,7 @@ describe('loadConfig', () => { }) }) -describe('listPlugins', () => { +describe('listAdaptors', () => { const mockPlugins: PluginExecutionResult[] = [ { plugin: 'GlobalMemoryInputPlugin', @@ -169,7 +169,7 @@ describe('listPlugins', () => { it('should invoke list_plugins with cwd only', async () => { mockedInvoke.mockResolvedValue(mockPlugins) - const result = await listPlugins('/home/user/project') + const result = await listAdaptors('/home/user/project') expect(mockedInvoke).toHaveBeenCalledOnce() expect(mockedInvoke).toHaveBeenCalledWith('list_plugins', { @@ -181,7 +181,7 @@ 
describe('listPlugins', () => { it('should return typed PluginExecutionResult array', async () => { mockedInvoke.mockResolvedValue(mockPlugins) - const result = await listPlugins('/workspace') + const result = await listAdaptors('/workspace') expect(result).toHaveLength(2) expect(result[0].plugin).toBe('GlobalMemoryInputPlugin') @@ -192,7 +192,7 @@ describe('listPlugins', () => { it('should propagate invoke rejection', async () => { mockedInvoke.mockRejectedValue(new Error('timeout')) - await expect(listPlugins('/slow')).rejects.toThrow('timeout') + await expect(listAdaptors('/slow')).rejects.toThrow('timeout') }) }) diff --git a/gui/src/api/bridge.ts b/gui/src/api/bridge.ts index 40c4e83a..9a60268f 100644 --- a/gui/src/api/bridge.ts +++ b/gui/src/api/bridge.ts @@ -1,10 +1,9 @@ import { invoke } from '@tauri-apps/api/core' export interface LogEntry { - readonly timestamp: string - readonly level: string - readonly logger: string - readonly payload: unknown + readonly stream: 'stdout' | 'stderr' + readonly source?: string + readonly markdown: string } export interface PluginExecutionResult { @@ -25,8 +24,8 @@ export interface PipelineResult { readonly errors: readonly string[] } -export function executePipeline(cwd: string, dryRun = false): Promise { - return invoke('execute_pipeline', { cwd, dryRun }) +export function installPipeline(cwd: string, dryRun = false): Promise { + return invoke('install_pipeline', { cwd, dryRun }) } export function cleanOutputs(cwd: string, dryRun = false): Promise { @@ -37,7 +36,7 @@ export function loadConfig(cwd: string): Promise { return invoke('load_config', { cwd }) } -export function listPlugins(cwd: string): Promise { +export function listAdaptors(cwd: string): Promise { return invoke('list_plugins', { cwd }) } diff --git a/gui/src/components/MarkdownLogBlock.tsx b/gui/src/components/MarkdownLogBlock.tsx new file mode 100644 index 00000000..0576d062 --- /dev/null +++ b/gui/src/components/MarkdownLogBlock.tsx @@ -0,0 +1,85 @@ 
+import type {FC, ReactNode} from 'react' + +import {cn} from '@/lib/utils' + +interface MarkdownLogBlockProps { + readonly markdown: string + readonly className?: string +} + +function renderInlineMarkdown(text: string): ReactNode[] { + return text.split(/(`[^`]+`)/g).flatMap((segment, segmentIndex) => { + if (/^`[^`]+`$/u.test(segment)) { + return ( + + {segment.slice(1, -1)} + + ) + } + + return segment.split(/(\*\*[^*]+\*\*)/g).map((part, partIndex) => { + if (/^\*\*[^*]+\*\*$/u.test(part)) { + return ( + + {part.slice(2, -2)} + + ) + } + + return {part} + }) + }) +} + +function isMarkdownListBlock(lines: readonly string[]): boolean { + return lines.every((line) => line.trim().length === 0 || /^\s*(?:- |\d+\. )/u.test(line)) +} + +export const MarkdownLogBlock: FC = ({markdown, className}) => { + const blocks = markdown.trim().split(/\n{2,}/u).filter((block) => block.trim().length > 0) + + return ( +
+ {blocks.map((block, index) => { + const lines = block.split('\n') + const firstLine = lines[0]?.trim() ?? '' + + if (firstLine.startsWith('### ')) { + return ( +

+ {renderInlineMarkdown(firstLine.slice(4))} +

+ ) + } + + if (lines.length === 1 && /^\*\*[^*]+\*\*$/u.test(firstLine)) { + return ( +

+ {renderInlineMarkdown(firstLine)} +

+ ) + } + + if (isMarkdownListBlock(lines)) { + return ( +
+              {block}
+            
+ ) + } + + return ( +

+ {renderInlineMarkdown(block)} +

+ ) + })} +
+ ) +} diff --git a/gui/src/components/Sidebar.tsx b/gui/src/components/Sidebar.tsx index 249acf53..27dc8a9f 100644 --- a/gui/src/components/Sidebar.tsx +++ b/gui/src/components/Sidebar.tsx @@ -17,7 +17,7 @@ import { useI18n } from '@/i18n' import { cn } from '@/lib/utils' interface NavItem { - readonly to: '/' | '/pipeline' | '/config' | '/plugins' | '/files' | '/logs' | '/settings' + readonly to: '/' | '/pipeline' | '/config' | '/adaptors' | '/files' | '/logs' | '/settings' readonly labelKey: string readonly icon: FC<{ className?: string }> } @@ -26,7 +26,7 @@ const navItems: readonly NavItem[] = [ { to: '/' as const, labelKey: 'nav.dashboard', icon: LayoutDashboard }, { to: '/pipeline' as const, labelKey: 'nav.pipeline', icon: Workflow }, { to: '/config' as const, labelKey: 'nav.config', icon: FileText }, - { to: '/plugins' as const, labelKey: 'nav.plugins', icon: Plug }, + { to: '/adaptors' as const, labelKey: 'nav.plugins', icon: Plug }, { to: '/files' as const, labelKey: 'nav.files', icon: FolderOpen }, { to: '/logs' as const, labelKey: 'nav.logs', icon: ScrollText }, { to: '/settings' as const, labelKey: 'nav.settings', icon: Cog }, diff --git a/gui/src/components/__tests__/Sidebar.property.test.tsx b/gui/src/components/__tests__/Sidebar.property.test.tsx index ebb77f4c..580e82eb 100644 --- a/gui/src/components/__tests__/Sidebar.property.test.tsx +++ b/gui/src/components/__tests__/Sidebar.property.test.tsx @@ -28,7 +28,7 @@ const NAV_ROUTE_PATHS = [ '/', '/pipeline', '/config', - '/plugins', + '/adaptors', '/logs', '/settings', ] as const diff --git a/gui/src/hooks/usePipeline.ts b/gui/src/hooks/usePipeline.ts index 1741c550..ff12fa11 100644 --- a/gui/src/hooks/usePipeline.ts +++ b/gui/src/hooks/usePipeline.ts @@ -1,7 +1,7 @@ import { useCallback, useState } from 'react'; import type { PipelineResult } from '@/api/bridge'; -import { cleanOutputs, executePipeline } from '@/api/bridge'; +import { cleanOutputs, installPipeline } from '@/api/bridge'; export type 
PipelineStatus = | { readonly kind: 'idle' } @@ -11,7 +11,7 @@ export type PipelineStatus = export interface UsePipelineReturn { readonly status: PipelineStatus - readonly execute: (cwd: string) => Promise + readonly install: (cwd: string) => Promise readonly clean: (cwd: string) => Promise readonly dryRun: (cwd: string) => Promise readonly reset: () => void @@ -20,10 +20,10 @@ export interface UsePipelineReturn { export function usePipeline(): UsePipelineReturn { const [status, setStatus] = useState({ kind: 'idle' }) - const execute = useCallback(async (cwd: string) => { + const install = useCallback(async (cwd: string) => { setStatus({ kind: 'running' }) try { - const result = await executePipeline(cwd, false) + const result = await installPipeline(cwd, false) setStatus({ kind: 'completed', result }) } catch (err) { setStatus({ kind: 'error', message: err instanceof Error ? err.message : String(err) }) @@ -43,7 +43,7 @@ export function usePipeline(): UsePipelineReturn { const dryRun = useCallback(async (cwd: string) => { setStatus({ kind: 'running' }) try { - const result = await executePipeline(cwd, true) + const result = await installPipeline(cwd, true) setStatus({ kind: 'completed', result }) } catch (err) { setStatus({ kind: 'error', message: err instanceof Error ? 
err.message : String(err) }) @@ -54,5 +54,5 @@ export function usePipeline(): UsePipelineReturn { setStatus({ kind: 'idle' }) }, []) - return { status, execute, clean, dryRun, reset } + return { status, install, clean, dryRun, reset } } diff --git a/gui/src/i18n/en-US.json b/gui/src/i18n/en-US.json index e5d3c473..40c558d0 100644 --- a/gui/src/i18n/en-US.json +++ b/gui/src/i18n/en-US.json @@ -7,7 +7,7 @@ "nav.logs": "Logs", "nav.files": "Files", "nav.settings": "Settings", - "pipeline.execute": "Execute Sync", + "pipeline.install": "Install", "pipeline.clean": "Clean Outputs", "pipeline.dryRun": "Dry Run", "pipeline.status.idle": "Idle", @@ -39,10 +39,8 @@ "plugins.type.output": "Output Plugin", "logs.title": "Logs", "logs.filter.all": "All", - "logs.filter.error": "Error", - "logs.filter.warn": "Warning", - "logs.filter.info": "Info", - "logs.filter.debug": "Debug", + "logs.filter.stdout": "Stdout", + "logs.filter.stderr": "Stderr", "files.title": "Files", "files.tab.app": "App", "files.tab.ext": "Ext", @@ -77,7 +75,7 @@ "common.confirm": "Confirm", "common.error": "Error", "common.success": "Success", - "tray.execute": "Execute Sync", + "tray.install": "Install", "tray.show": "Open Main Window", "tray.quit": "Quit", "dashboard.stats.title": "Aindex Statistics", diff --git a/gui/src/i18n/zh-CN.json b/gui/src/i18n/zh-CN.json index 9e6bd708..14f46eee 100644 --- a/gui/src/i18n/zh-CN.json +++ b/gui/src/i18n/zh-CN.json @@ -7,7 +7,7 @@ "nav.logs": "日志查看", "nav.files": "文件查看", "nav.settings": "设置", - "pipeline.execute": "执行同步", + "pipeline.install": "安装", "pipeline.clean": "清理输出", "pipeline.dryRun": "预览模式", "pipeline.status.idle": "空闲", @@ -39,10 +39,8 @@ "plugins.type.output": "输出插件", "logs.title": "日志查看", "logs.filter.all": "全部", - "logs.filter.error": "错误", - "logs.filter.warn": "警告", - "logs.filter.info": "信息", - "logs.filter.debug": "调试", + "logs.filter.stdout": "标准输出", + "logs.filter.stderr": "标准错误", "files.title": "文件查看", "files.tab.app": "应用", 
"files.tab.ext": "扩展", @@ -77,7 +75,7 @@ "common.confirm": "确认", "common.error": "错误", "common.success": "成功", - "tray.execute": "执行同步", + "tray.install": "Install", "tray.show": "打开主窗口", "tray.quit": "退出", "dashboard.stats.title": "Aindex 统计", diff --git a/gui/src/pages/PluginsPage.tsx b/gui/src/pages/AdaptorsPage.tsx similarity index 94% rename from gui/src/pages/PluginsPage.tsx rename to gui/src/pages/AdaptorsPage.tsx index bef44cd5..9cdb690f 100644 --- a/gui/src/pages/PluginsPage.tsx +++ b/gui/src/pages/AdaptorsPage.tsx @@ -4,11 +4,11 @@ import { useEffect, useState } from 'react' import { RefreshCw } from 'lucide-react' import type { PluginExecutionResult } from '@/api/bridge' -import { listPlugins } from '@/api/bridge' +import { listAdaptors } from '@/api/bridge' import { useI18n } from '@/i18n' import { cn } from '@/lib/utils' -const PluginsPage: FC = () => { +const AdaptorsPage: FC = () => { const { t } = useI18n() const [plugins, setPlugins] = useState([]) const [loading, setLoading] = useState(false) @@ -20,7 +20,7 @@ const PluginsPage: FC = () => { setLoading(true) setError(null) try { - const result = await listPlugins(cwd) + const result = await listAdaptors(cwd) setPlugins(result) } catch (err) { setError(err instanceof Error ? 
err.message : String(err)) @@ -84,4 +84,4 @@ const PluginsPage: FC = () => { ) } -export default PluginsPage +export default AdaptorsPage diff --git a/gui/src/pages/DashboardPage.tsx b/gui/src/pages/DashboardPage.tsx index 422d4d5a..b5c4fd8c 100644 --- a/gui/src/pages/DashboardPage.tsx +++ b/gui/src/pages/DashboardPage.tsx @@ -58,7 +58,7 @@ const DashboardPage: FC = () => { const { t } = useI18n() const { resolved } = useTheme() const isDark = resolved === 'dark' - const { status, execute, clean, dryRun, reset } = usePipeline() + const { status, install, clean, dryRun, reset } = usePipeline() const [stats, setStats] = useState(null) const [statsLoading, setStatsLoading] = useState(false) @@ -142,9 +142,9 @@ const DashboardPage: FC = () => { {/* Quick Actions */}
-