diff --git a/.config/lockstep.schema.json b/.config/lockstep.schema.json new file mode 100644 index 0000000..f0b7689 --- /dev/null +++ b/.config/lockstep.schema.json @@ -0,0 +1,463 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://github.com/SocketDev/lockstep.schema.json", + "title": "lockstep manifest", + "description": "Unified lock-step manifest shared across Socket repos. One schema, all cases — the `kind` discriminator on each row selects which flavor of lock-step applies. Single-file manifests work for repos with one cohesive concern; the `includes[]` field carves a manifest into per-area files (e.g. lockstep-acorn.json + lockstep-build.json) when one repo tracks multiple independent concerns.", + "type": "object", + "required": ["rows"], + "properties": { + "$schema": { + "description": "JSON Schema reference for editor autocompletion. Conventionally `./lockstep.schema.json` — both the manifest and its schema live side-by-side at repo root.", + "type": "string" + }, + "description": { + "description": "Human-readable description of what this manifest tracks. Read by humans, not parsed. One short paragraph.", + "type": "string" + }, + "area": { + "description": "Optional label for this manifest file. Used as a grouping key in harness output (per-area summaries). Defaults to 'root' for the top-level file and to the filename stem (with the `lockstep-` prefix stripped) for included files.", + "type": "string" + }, + "includes": { + "description": "Relative paths to sub-manifests. The harness loads each and merges its rows into a single flattened view. Top-level `upstreams` and `sites` maps override any same-keyed entries from included manifests (top wins on conflict).", + "type": "array", + "items": { + "type": "string" + } + }, + "upstreams": { + "description": "Named upstream submodules. Each entry pairs a submodule path with its repo URL. 
Referenced by rows[].upstream on file-fork / version-pin / feature-parity / spec-conformance rows. Omit when the manifest only has lang-parity rows.", + "type": "object", + "patternProperties": { + "^(.*)$": { + "additionalProperties": false, + "description": "A submodule + its upstream repo URL. Referenced by file-fork / version-pin / feature-parity / spec-conformance rows via `upstream`.", + "type": "object", + "required": ["submodule", "repo"], + "properties": { + "submodule": { + "description": "Submodule path, relative to repo root. Must match an entry in `.gitmodules`.", + "type": "string" + }, + "repo": { + "pattern": "^https?://[^/\\s]+", + "description": "Upstream repository URL (http:// or https:// + host). Anchored at the host so empty URLs fail validation rather than failing at git-fetch time.", + "type": "string" + } + } + } + } + }, + "sites": { + "description": "Named sibling ports (typically per-language: `cpp`, `go`, `rust`, `typescript`). Referenced by rows[].ports. on lang-parity rows. Omit when the manifest has no lang-parity rows.", + "type": "object", + "patternProperties": { + "^(.*)$": { + "additionalProperties": false, + "description": "A sibling port (typically per-language). Referenced by lang-parity rows via `ports.`.", + "type": "object", + "required": ["path"], + "properties": { + "path": { + "description": "Path to the port's root directory, relative to repo root. The harness reads files under this path when checking the port's assertions.", + "type": "string" + }, + "language": { + "description": "Language label for human reports (e.g. `cpp`, `go`, `rust`, `typescript`). The harness does no language-specific processing — it's purely informational.", + "type": "string" + } + } + } + } + }, + "rows": { + "description": "The actual checks the harness runs. Empty array is valid (and expected for repos that have no upstream relationships — e.g. 
socket-cli's empty rows).", + "type": "array", + "items": { + "anyOf": [ + { + "additionalProperties": false, + "description": "A local file derived from an upstream file with intentional modifications. Drift = upstream moved forward on this path; we may need to cherry-pick or update our deviations.", + "type": "object", + "required": [ + "kind", + "id", + "upstream", + "local", + "upstream_path", + "forked_at_sha", + "deviations" + ], + "properties": { + "kind": { + "const": "file-fork", + "type": "string" + }, + "id": { + "pattern": "^[a-z0-9][a-z0-9-]*(/[A-Za-z0-9_-]+)?$", + "description": "Stable identifier, unique within the manifest. Kebab-case (lowercase letters / digits / hyphens). For ids that mirror an external API name, use a namespace prefix: `api/findNodeAt`, `node/parseURL`. The slash separates the kebab namespace from the free-form leaf.", + "type": "string" + }, + "upstream": { + "description": "Key into the top-level `upstreams` map. The harness errors if no matching upstream entry exists.", + "type": "string" + }, + "criticality": { + "minimum": 1, + "maximum": 10, + "description": "Stay-in-step importance. Anchors: 1 = cosmetic / nice-to-have; 5 = behavioral parity expected; 10 = security-sensitive. The harness surfaces high-criticality drift louder and gates feature-parity rows on the criticality/10 floor.", + "type": "integer" + }, + "conformance_test": { + "description": "Path (relative to repo root) of a test that enforces behavior parity (modulo documented deviations). Strongly recommended — static checks catch syntactic drift, not behavioral. A row without a conformance test relies entirely on code-pattern / fixture-snapshot checks.", + "type": "string" + }, + "notes": { + "description": "Free-form context: why this row exists, gotchas, links to related issues / PRs / upstream discussions. 
Read by humans, not by the harness.", + "type": "string" + }, + "local": { + "description": "Path (relative to repo root) of our ported copy of the upstream file.", + "type": "string" + }, + "upstream_path": { + "description": "Path within the upstream submodule (relative to the submodule root) of the source file we forked from.", + "type": "string" + }, + "forked_at_sha": { + "pattern": "^[0-9a-f]{40}$", + "description": "Full 40-char SHA of the upstream commit we forked from. The harness runs `git log ..HEAD -- ` inside the submodule to surface drift.", + "type": "string" + }, + "deviations": { + "minItems": 1, + "description": "Human-readable list of intentional differences from upstream. Zero deviations = the file should not be forked; consume upstream directly. Each entry is one short sentence (e.g. `swap require() for import` or `remove Node 14 fallback`).", + "type": "array", + "items": { + "type": "string" + } + } + } + }, + { + "additionalProperties": false, + "description": "A submodule pinned to an upstream release. Drift = upstream cut a new release we haven't adopted.", + "type": "object", + "required": [ + "kind", + "id", + "upstream", + "pinned_sha", + "upgrade_policy" + ], + "properties": { + "kind": { + "const": "version-pin", + "type": "string" + }, + "id": { + "pattern": "^[a-z0-9][a-z0-9-]*(/[A-Za-z0-9_-]+)?$", + "description": "Stable identifier, unique within the manifest. Kebab-case (lowercase letters / digits / hyphens). For ids that mirror an external API name, use a namespace prefix: `api/findNodeAt`, `node/parseURL`. The slash separates the kebab namespace from the free-form leaf.", + "type": "string" + }, + "upstream": { + "description": "Key into the top-level `upstreams` map. The harness errors if no matching upstream entry exists.", + "type": "string" + }, + "criticality": { + "minimum": 1, + "maximum": 10, + "description": "Stay-in-step importance. 
Anchors: 1 = cosmetic / nice-to-have; 5 = behavioral parity expected; 10 = security-sensitive. The harness surfaces high-criticality drift louder and gates feature-parity rows on the criticality/10 floor.", + "type": "integer" + }, + "conformance_test": { + "description": "Path (relative to repo root) of a test that enforces behavior parity (modulo documented deviations). Strongly recommended — static checks catch syntactic drift, not behavioral. A row without a conformance test relies entirely on code-pattern / fixture-snapshot checks.", + "type": "string" + }, + "notes": { + "description": "Free-form context: why this row exists, gotchas, links to related issues / PRs / upstream discussions. Read by humans, not by the harness.", + "type": "string" + }, + "pinned_sha": { + "pattern": "^[0-9a-f]{40}$", + "description": "Full 40-char SHA the submodule is pinned to. Authoritative — the harness compares this against the submodule HEAD, not against `pinned_tag`.", + "type": "string" + }, + "pinned_tag": { + "description": "Human-readable release tag for reports / PR titles (e.g. `v3.2.1`). Informational only — `pinned_sha` is the source of truth. Useful when an upstream cuts a release without changing semver but moves the SHA.", + "type": "string" + }, + "upgrade_policy": { + "description": "`track-latest` = any new release is actionable; updating-lockstep auto-bumps. `major-gate` = patch / minor auto-bump; major bumps surfaced as advisory. `locked` = explicit decision per upgrade; the harness reports drift but never auto-bumps. Pick `locked` when bumping is gated on a coordinated change in another repo (e.g. Node vendoring temporal-rs).", + "anyOf": [ + { + "const": "track-latest", + "type": "string" + }, + { + "const": "major-gate", + "type": "string" + }, + { + "const": "locked", + "type": "string" + } + ] + } + } + }, + { + "additionalProperties": false, + "description": "A behavioral feature reimplemented locally to match upstream behavior. 
Three-pillar validation: code patterns + test patterns + fixture snapshot. The total score is averaged across present pillars; rows below the criticality / 10 floor surface as drift.", + "type": "object", + "required": ["kind", "id", "upstream", "criticality", "local_area"], + "properties": { + "kind": { + "const": "feature-parity", + "type": "string" + }, + "id": { + "pattern": "^[a-z0-9][a-z0-9-]*(/[A-Za-z0-9_-]+)?$", + "description": "Stable identifier, unique within the manifest. Kebab-case (lowercase letters / digits / hyphens). For ids that mirror an external API name, use a namespace prefix: `api/findNodeAt`, `node/parseURL`. The slash separates the kebab namespace from the free-form leaf.", + "type": "string" + }, + "upstream": { + "description": "Key into the top-level `upstreams` map. The harness errors if no matching upstream entry exists.", + "type": "string" + }, + "criticality": { + "minimum": 1, + "maximum": 10, + "description": "Stay-in-step importance. Anchors: 1 = cosmetic / nice-to-have; 5 = behavioral parity expected; 10 = security-sensitive. The harness surfaces high-criticality drift louder and gates feature-parity rows on the criticality/10 floor.", + "type": "integer" + }, + "conformance_test": { + "description": "Path (relative to repo root) of a test that enforces behavior parity (modulo documented deviations). Strongly recommended — static checks catch syntactic drift, not behavioral. A row without a conformance test relies entirely on code-pattern / fixture-snapshot checks.", + "type": "string" + }, + "notes": { + "description": "Free-form context: why this row exists, gotchas, links to related issues / PRs / upstream discussions. Read by humans, not by the harness.", + "type": "string" + }, + "local_area": { + "description": "Path (relative to repo root) of the local module / directory implementing the feature. 
The code-pattern scan targets this directory recursively, excluding test files (matched by `*.test.{ts,mts,js,mjs}` and `*.spec.*`).", + "type": "string" + }, + "test_area": { + "description": "Path (relative to repo root) of the directory where tests for this feature live. When absent, the harness searches for tests inside `local_area`. Useful when tests live in a sibling directory (e.g. `local_area=src/auth`, `test_area=test/auth`).", + "type": "string" + }, + "code_patterns": { + "description": "Regex patterns the local implementation must contain. Prefer anchored patterns (function signatures, exported symbols) over loose keywords to avoid matching comments. Each pattern is searched independently across `local_area`; missing patterns lower the code score.", + "type": "array", + "items": { + "type": "string" + } + }, + "test_patterns": { + "description": "Regex patterns the test suite must contain. Same scoring as `code_patterns` but searched across `test_area` (or `local_area` when `test_area` is absent).", + "type": "array", + "items": { + "type": "string" + } + }, + "fixture_check": { + "additionalProperties": false, + "description": "Golden-input verification. Snapshot-based diffs replace the brittle hardcoded-count checks the harness used historically (sdxgen's lock-step-features lesson).", + "type": "object", + "required": ["fixture_path"], + "properties": { + "fixture_path": { + "description": "Path (relative to repo root) of the input fixture the local implementation runs against.", + "type": "string" + }, + "snapshot_path": { + "description": "Path (relative to repo root) of the snapshot file the implementation's output is diffed against. When absent, the harness only checks that the fixture is processed without error — no output comparison.", + "type": "string" + }, + "diff_tolerance": { + "description": "How the snapshot diff is computed. `exact` = byte-identical; the strictest check. 
`line-by-line` = per-line diff after normalizing line endings (CRLF / LF); tolerates trailing-newline drift. `semantic` = harness-defined deeper comparison (typically AST or normalized JSON for output that has equivalent representations); each row kind documents what `semantic` means in its context.", + "anyOf": [ + { + "const": "exact", + "type": "string" + }, + { + "const": "line-by-line", + "type": "string" + }, + { + "const": "semantic", + "type": "string" + } + ] + } + } + } + } + }, + { + "additionalProperties": false, + "description": "A local reimplementation of an external specification. Drift = the spec was revised; we may need to update our impl, the spec_version, or both.", + "type": "object", + "required": [ + "kind", + "id", + "upstream", + "local_impl", + "spec_version" + ], + "properties": { + "kind": { + "const": "spec-conformance", + "type": "string" + }, + "id": { + "pattern": "^[a-z0-9][a-z0-9-]*(/[A-Za-z0-9_-]+)?$", + "description": "Stable identifier, unique within the manifest. Kebab-case (lowercase letters / digits / hyphens). For ids that mirror an external API name, use a namespace prefix: `api/findNodeAt`, `node/parseURL`. The slash separates the kebab namespace from the free-form leaf.", + "type": "string" + }, + "upstream": { + "description": "Key into the top-level `upstreams` map. The harness errors if no matching upstream entry exists.", + "type": "string" + }, + "criticality": { + "minimum": 1, + "maximum": 10, + "description": "Stay-in-step importance. Anchors: 1 = cosmetic / nice-to-have; 5 = behavioral parity expected; 10 = security-sensitive. The harness surfaces high-criticality drift louder and gates feature-parity rows on the criticality/10 floor.", + "type": "integer" + }, + "conformance_test": { + "description": "Path (relative to repo root) of a test that enforces behavior parity (modulo documented deviations). Strongly recommended — static checks catch syntactic drift, not behavioral. 
A row without a conformance test relies entirely on code-pattern / fixture-snapshot checks.", + "type": "string" + }, + "notes": { + "description": "Free-form context: why this row exists, gotchas, links to related issues / PRs / upstream discussions. Read by humans, not by the harness.", + "type": "string" + }, + "local_impl": { + "description": "Path (relative to repo root) of our reimplementation of the spec. Either a file or a directory.", + "type": "string" + }, + "spec_version": { + "description": "Version label of the spec we conform to (e.g. `ECMAScript-2024`, `RFC-9110`, commit SHA, or upstream tag). Free-form — the harness only checks for drift via the upstream submodule, not the version string itself.", + "type": "string" + }, + "spec_path": { + "description": "Path within the upstream submodule to the spec document. Used to scope drift detection to the spec file (rather than every change in the upstream repo).", + "type": "string" + } + } + }, + { + "additionalProperties": false, + "description": "N sibling language ports of one spec within a single project. Drift = a port diverged from its siblings (one implemented, others opt-out without reason / or vice versa), or a `rejected` anti-pattern was reintroduced.", + "type": "object", + "required": [ + "kind", + "id", + "name", + "description", + "category", + "ports" + ], + "properties": { + "kind": { + "const": "lang-parity", + "type": "string" + }, + "id": { + "pattern": "^[a-z0-9][a-z0-9-]*(/[A-Za-z0-9_-]+)?$", + "description": "Stable identifier, unique within the manifest. Kebab-case (lowercase letters / digits / hyphens). For ids that mirror an external API name, use a namespace prefix: `api/findNodeAt`, `node/parseURL`. The slash separates the kebab namespace from the free-form leaf.", + "type": "string" + }, + "name": { + "description": "Short human-readable label for this row (e.g. `Range parsing`, `Async iterators`). 
Used in report headers; not parsed.", + "type": "string" + }, + "description": { + "description": "One-paragraph description of what behavior this row asserts on each port. Read by humans; not parsed.", + "type": "string" + }, + "category": { + "description": "Grouping tag for report aggregation (e.g. `parser`, `runtime`, `api`). The single magic value is `rejected` — RESERVED for anti-patterns: every port MUST be `opt-out`, and any port flipping to `implemented` exits 2 ('rejected anti-pattern reintroduced'). Use freely otherwise.", + "type": "string" + }, + "criticality": { + "minimum": 1, + "maximum": 10, + "description": "Stay-in-step importance. Anchors: 1 = cosmetic / nice-to-have; 5 = behavioral parity expected; 10 = security-sensitive. The harness surfaces high-criticality drift louder and gates feature-parity rows on the criticality/10 floor.", + "type": "integer" + }, + "conformance_test": { + "description": "Path (relative to repo root) of a test that enforces behavior parity (modulo documented deviations). Strongly recommended — static checks catch syntactic drift, not behavioral. A row without a conformance test relies entirely on code-pattern / fixture-snapshot checks.", + "type": "string" + }, + "notes": { + "description": "Free-form context: why this row exists, gotchas, links to related issues / PRs / upstream discussions. Read by humans, not by the harness.", + "type": "string" + }, + "assertions": { + "description": "Assertions checked against each port. Each entry is `{kind: string, ...}`; the harness dispatches on `kind`. See AssertionSchema description for known kinds; unknown kinds skip with a log line. Mutually compatible with `matrix_files` (a row can have both, neither, or one).", + "type": "array", + "items": { + "description": "A typed assertion the lang-parity row asserts on each port. Shape: `{kind: string, ...kind-specific fields}`. 
The lockstep harness dispatches on `kind`; per-kind contracts are documented in the harness, not here.", + "type": "object", + "patternProperties": { + "^(.*)$": {} + } + } + }, + "matrix_files": { + "description": "Paths (relative to this manifest) of `lockstep-lang-*.json` sub-manifests this row indexes. For inventory-style rows that group many smaller checks under one parent. The harness loads each and merges its rows.", + "type": "array", + "items": { + "type": "string" + } + }, + "ports": { + "description": "Per-port status map. Keys MUST match top-level `sites` keys exactly — the harness errors on stray ports / missing sites. Each value is `{status: 'implemented' | 'opt-out', ...}` per PortStatusSchema.", + "type": "object", + "patternProperties": { + "^(.*)$": { + "additionalProperties": false, + "description": "Per-port status for a lang-parity row. The `ports` map on a row pairs each top-level `sites` key with one of these.", + "type": "object", + "required": ["status"], + "properties": { + "status": { + "description": "`implemented` = port meets the row's assertions; `opt-out` = port consciously skips this row (requires `reason`).", + "anyOf": [ + { + "const": "implemented", + "type": "string" + }, + { + "const": "opt-out", + "type": "string" + } + ] + }, + "reason": { + "description": "Why this port opts out. SCHEMA-CONDITIONAL: required when status is `opt-out`. The TypeBox type cannot express the conditional, but the harness rejects opt-out rows with empty / missing reason.", + "type": "string" + }, + "path": { + "description": "Optional path to this port's implementation of the row. Useful for module-inventory rows where each language points at a different directory; redundant when the port's overall layout already encodes the path.", + "type": "string" + }, + "note": { + "description": "Optional free-form note attached to this specific port's status. 
For multi-port context, prefer the row-level `notes` field.", + "type": "string" + } + } + } + } + } + } + } + ] + } + } + } +} diff --git a/.config/oxlint-plugin/index.mts b/.config/oxlint-plugin/index.mts new file mode 100644 index 0000000..8008216 --- /dev/null +++ b/.config/oxlint-plugin/index.mts @@ -0,0 +1,83 @@ +/** + * @fileoverview Fleet oxlint plugin. Custom rules that encode the + * fleet's CLAUDE.md style guide as lint errors with autofix where + * the rewrite is unambiguous. + * + * Why a plugin instead of a separate scanner: oxlint's native plugin + * surface integrates with the existing `pnpm run lint` pipeline, + * inherits oxlint's AST + sourcemap + fix-application machinery, and + * keeps the rule set discoverable via `oxlint --rules`. + * + * Wiring: `.config/oxlintrc.json` adds this plugin via `jsPlugins: + * ["./oxlint-plugin/index.mts"]` and enables rules under the + * `socket/` namespace. + */ + +import exportTopLevelFunctions from './rules/export-top-level-functions.mts' +import inclusiveLanguage from './rules/inclusive-language.mts' +import maxFileLines from './rules/max-file-lines.mts' +import noConsolePreferLogger from './rules/no-console-prefer-logger.mts' +import noDefaultExport from './rules/no-default-export.mts' +import noDynamicImportOutsideBundle from './rules/no-dynamic-import-outside-bundle.mts' +import noFetchPreferHttpRequest from './rules/no-fetch-prefer-http-request.mts' +import noInlineLogger from './rules/no-inline-logger.mts' +import noNpxDlx from './rules/no-npx-dlx.mts' +import noPlaceholders from './rules/no-placeholders.mts' +import noPromiseRace from './rules/no-promise-race.mts' +import noPromiseRaceInLoop from './rules/no-promise-race-in-loop.mts' +import noStatusEmoji from './rules/no-status-emoji.mts' +import personalPathPlaceholders from './rules/personal-path-placeholders.mts' +import preferAsyncSpawn from './rules/prefer-async-spawn.mts' +import preferCachedForLoop from './rules/prefer-cached-for-loop.mts' 
+import preferExistsSync from './rules/prefer-exists-sync.mts' +import preferFunctionDeclaration from './rules/prefer-function-declaration.mts' +import preferNodeBuiltinImports from './rules/prefer-node-builtin-imports.mts' +import preferSafeDelete from './rules/prefer-safe-delete.mts' +import preferSeparateTypeImport from './rules/prefer-separate-type-import.mts' +import preferUndefinedOverNull from './rules/prefer-undefined-over-null.mts' +import socketApiTokenEnv from './rules/socket-api-token-env.mts' +import sortEqualityDisjunctions from './rules/sort-equality-disjunctions.mts' +import sortNamedImports from './rules/sort-named-imports.mts' +import sortRegexAlternations from './rules/sort-regex-alternations.mts' +import sortSetArgs from './rules/sort-set-args.mts' +import sortSourceMethods from './rules/sort-source-methods.mts' + +/** @type {import('eslint').ESLint.Plugin} */ +const plugin = { + meta: { + name: 'socket', + version: '0.5.0', + }, + rules: { + 'export-top-level-functions': exportTopLevelFunctions, + 'inclusive-language': inclusiveLanguage, + 'max-file-lines': maxFileLines, + 'no-console-prefer-logger': noConsolePreferLogger, + 'no-default-export': noDefaultExport, + 'no-dynamic-import-outside-bundle': noDynamicImportOutsideBundle, + 'no-fetch-prefer-http-request': noFetchPreferHttpRequest, + 'no-inline-logger': noInlineLogger, + 'no-npx-dlx': noNpxDlx, + 'no-placeholders': noPlaceholders, + 'no-promise-race': noPromiseRace, + 'no-promise-race-in-loop': noPromiseRaceInLoop, + 'no-status-emoji': noStatusEmoji, + 'personal-path-placeholders': personalPathPlaceholders, + 'prefer-async-spawn': preferAsyncSpawn, + 'prefer-cached-for-loop': preferCachedForLoop, + 'prefer-exists-sync': preferExistsSync, + 'prefer-function-declaration': preferFunctionDeclaration, + 'prefer-node-builtin-imports': preferNodeBuiltinImports, + 'prefer-safe-delete': preferSafeDelete, + 'prefer-separate-type-import': preferSeparateTypeImport, + 'prefer-undefined-over-null': 
preferUndefinedOverNull, + 'socket-api-token-env': socketApiTokenEnv, + 'sort-equality-disjunctions': sortEqualityDisjunctions, + 'sort-named-imports': sortNamedImports, + 'sort-regex-alternations': sortRegexAlternations, + 'sort-set-args': sortSetArgs, + 'sort-source-methods': sortSourceMethods, + }, +} + +export default plugin diff --git a/.config/oxlint-plugin/package.json b/.config/oxlint-plugin/package.json new file mode 100644 index 0000000..fbbf76b --- /dev/null +++ b/.config/oxlint-plugin/package.json @@ -0,0 +1,9 @@ +{ + "name": "socket-oxlint-plugin", + "private": true, + "type": "module", + "main": "./index.mts", + "exports": { + ".": "./index.mts" + } +} diff --git a/.config/oxlint-plugin/rules/_inject-import.mts b/.config/oxlint-plugin/rules/_inject-import.mts new file mode 100644 index 0000000..254ad04 --- /dev/null +++ b/.config/oxlint-plugin/rules/_inject-import.mts @@ -0,0 +1,110 @@ +/** + * @fileoverview Shared helper for rule fixers that need to inject + * an `import { Name } from 'specifier'` statement (and optionally a + * matching hoisted `const`) into a file. + * + * Fixers call `summarizeImportTarget(programNode, specifier, importName)` + * to learn the file's current shape, then `appendImportFixes(...)` + * inside their `fix(fixer)` callback to add the missing pieces. + * + * ESLint's autofixer dedupes overlapping inserts at the same range, + * so multiple violations in the same file can each emit the import + * insertion safely — only one survives. 
/**
 * Walk a Program node body once and summarize what a fixer needs to
 * know before injecting an import:
 * - `lastImport`: the last top-level ImportDeclaration (or undefined),
 *   used as the insertion anchor.
 * - `hasImport`: whether `importName` is already imported — from ANY
 *   source. Import detection deliberately ignores the specifier path:
 *   a file inside the lib package imports `getDefaultLogger` from
 *   `'../logger'`, while a downstream repo imports the same name from
 *   `'@socketsecurity/lib/logger'`. Both resolve to the same
 *   identifier; either should count as "already imported" so the
 *   autofix doesn't inject a duplicate (and broken — see issue #64).
 * - `hasLocal`: whether a top-level binding named `localName` already
 *   exists: any const/let/var declarator (including destructuring
 *   patterns), a top-level function declaration, or an import bound
 *   to that local name.
 *
 * `specifier` is retained in the signature for backward compatibility
 * but is no longer used for the match decision. Callers may pass any
 * truthy value (typically the canonical package path the rule would
 * inject if the import were missing).
 */
export function summarizeImportTarget(
  program,
  // eslint-disable-next-line no-unused-vars
  specifier,
  importName,
  localName,
) {
  // Recursively check whether a binding target declares `name`.
  // Handles plain identifiers plus object/array destructuring,
  // defaults, and rest elements — `const { logger } = ...` declares
  // `logger` just as surely as `const logger = ...`; missing it would
  // make the fixer inject a duplicate (redeclaring) hoist.
  function declaresName(id, name) {
    if (!id) {
      return false
    }
    switch (id.type) {
      case 'Identifier':
        return id.name === name
      case 'ObjectPattern':
        // Entries are Property ({ value }) or RestElement ({ argument }).
        return id.properties.some(p =>
          declaresName(p.value ?? p.argument, name),
        )
      case 'ArrayPattern':
        // Elements may be null (holes); declaresName guards on !id.
        return id.elements.some(el => declaresName(el, name))
      case 'AssignmentPattern':
        return declaresName(id.left, name)
      case 'RestElement':
        return declaresName(id.argument, name)
      default:
        return false
    }
  }

  let lastImport
  let hasImport = false
  let hasLocal = false
  for (const stmt of program.body) {
    if (stmt.type === 'ImportDeclaration') {
      lastImport = stmt
      for (const spec of stmt.specifiers) {
        if (
          spec.type === 'ImportSpecifier' &&
          spec.imported &&
          spec.imported.name === importName
        ) {
          hasImport = true
        }
        if (
          localName &&
          spec.local &&
          spec.local.name === localName &&
          (spec.type === 'ImportSpecifier' ||
            spec.type === 'ImportDefaultSpecifier' ||
            spec.type === 'ImportNamespaceSpecifier')
        ) {
          hasLocal = true
        }
      }
      continue
    }
    if (!localName) {
      continue
    }
    // A top-level `function localName() {}` occupies the name too.
    if (stmt.type === 'FunctionDeclaration' && stmt.id) {
      if (stmt.id.name === localName) {
        hasLocal = true
      }
      continue
    }
    if (stmt.type === 'VariableDeclaration') {
      for (const decl of stmt.declarations) {
        if (declaresName(decl.id, localName)) {
          hasLocal = true
        }
      }
    }
  }
  return { hasImport, hasLocal, lastImport }
}
/**
 * Build the fixer-side inserts for a missing import + optional hoist.
 * Returns an array of fixer operations the caller appends to its own
 * fix() return value.
 *
 * @param summary    output of summarizeImportTarget()
 * @param fixer      the fixer passed to context.report({ fix })
 * @param importLine the literal `import { ... } from '...'` text
 * @param hoistLine  optional; the literal `const x = ...()` text
 */
export function appendImportFixes(summary, fixer, importLine, hoistLine) {
  const { hasImport, hasLocal, lastImport } = summary
  // Insert `text` after the file's last import when one exists,
  // otherwise at the very top of the file. `before` / `after` carry
  // the newline padding appropriate to each position.
  const placed = (before, text, after) =>
    lastImport
      ? fixer.insertTextAfter(lastImport, `${before}${text}`)
      : fixer.insertTextBeforeRange([0, 0], `${text}${after}`)

  const fixes = []
  if (!hasImport) {
    fixes.push(placed('\n', importLine, '\n'))
  }
  if (hoistLine && !hasLocal) {
    fixes.push(placed('\n\n', hoistLine, '\n\n'))
  }
  return fixes
}
/**
 * Autofix: prepends `export ` to the function declaration when the
 * function isn't already named in a sibling `export { ... }`
 * statement. If a named-re-export already exists, report without
 * autofix (the human picks: keep the named-re-export shape, or
 * collapse to the inline `export function`).
 */

// Entry-point names exempt from the export requirement (script
// entrypoint convention).
const SCRIPT_ENTRY_NAMES = new Set(['main'])

/**
 * Collect the local names a module already exports via:
 * - `export { foo, bar }`
 * - `export { foo as bar }` (the LOCAL name `foo` counts)
 * - `export default foo`
 *
 * Inline `export function foo` declarations never reach this rule's
 * visitor (they are wrapped in an ExportNamedDeclaration, so the
 * `Program > FunctionDeclaration` selector doesn't match them), so
 * they need no handling here.
 */
function collectExportedNames(program) {
  const names = new Set()
  for (const stmt of program.body) {
    switch (stmt.type) {
      case 'ExportNamedDeclaration':
        // `export { foo, bar as baz }` — count the local name.
        if (!stmt.declaration) {
          for (const spec of stmt.specifiers) {
            if (spec.local && spec.local.type === 'Identifier') {
              names.add(spec.local.name)
            }
          }
        }
        break
      case 'ExportDefaultDeclaration':
        if (stmt.declaration && stmt.declaration.type === 'Identifier') {
          names.add(stmt.declaration.name)
        }
        break
      default:
        break
    }
  }
  return names
}

/** @type {import('eslint').Rule.RuleModule} */
const rule = {
  meta: {
    type: 'suggestion',
    docs: {
      description:
        'Require top-level function declarations to be exported (testability).',
      category: 'Best Practices',
      recommended: true,
    },
    fixable: 'code',
    messages: {
      missing:
        'Top-level function `{{name}}` should be `export function {{name}}`. Exporting internal helpers makes them directly testable.',
      missingAlreadyReExported:
        'Top-level function `{{name}}` is named in a separate `export {{ }}` statement; collapse to inline `export function {{name}}` for clarity (autofix skipped to avoid creating a duplicate export).',
    },
    schema: [],
  },

  create(context) {
    const src = context.getSourceCode
      ? context.getSourceCode()
      : context.sourceCode
    // Computed lazily on the first bare top-level function seen, then
    // reused for every sibling in the same Program.
    let knownExports

    return {
      'Program > FunctionDeclaration'(node) {
        if (!node.id || node.id.type !== 'Identifier') {
          return
        }
        const { name } = node.id
        if (SCRIPT_ENTRY_NAMES.has(name)) {
          return
        }
        if (!knownExports) {
          knownExports = collectExportedNames(src.ast)
        }
        if (knownExports.has(name)) {
          // Already exported via `export { name }` — report without
          // autofix; the human can choose whether to collapse to the
          // inline export.
          context.report({
            node: node.id,
            messageId: 'missingAlreadyReExported',
            data: { name },
          })
          return
        }
        context.report({
          node: node.id,
          messageId: 'missing',
          data: { name },
          // Prepending `export ` handles both `function name(...)`
          // and `async function name(...)`.
          fix: fixer => fixer.insertTextBefore(node, 'export '),
        })
      },
    }
  },
}

export default rule
+ * + * Substitutions: + * + * whitelist → allowlist + * blacklist → denylist + * master → main / primary + * slave → replica / secondary / worker + * grandfathered → legacy + * sanity check → quick check + * dummy → placeholder + * + * Detects identifiers, string literals, and comments containing the + * legacy terms. Word-boundary matched on the literal stem so case + * variants `Whitelist` / `WHITELIST` / `whitelisted` all fire. + * + * Autofix: + * - Identifiers and string literals: rewrite case-preserving + * (e.g. `Whitelist` → `Allowlist`, `WHITELIST` → `ALLOWLIST`, + * `whitelistEntry` → `allowlistEntry`). + * - Comments: rewrite the comment text in place, same case rules. + * - Multi-word terms (`sanity check`, `master branch`): only the + * first word is replaced; the rest is left alone (`sanity check` + * → `quick check`). + * + * Allowed exceptions (skipped — no report, no fix): + * - Third-party API field references: comment with + * `inclusive-language: external-api` adjacent to the line. + * - Vendored / fixture paths: handled at the .config/oxlintrc.json + * ignorePatterns level; this rule trusts the include set. + * - The literal phrase "main / primary" / etc. inside a doc that + * spells out the substitution table — handled by the + * `docs/references/inclusive-language.md` ignore pattern in + * .config/oxlintrc.json (caller adds the override). + */ + +// [legacyStem, replacementStem]. The detector matches the stem +// case-insensitively and word-boundary anchored. Replacement preserves +// case shape. +const SUBSTITUTIONS = [ + ['whitelist', 'allowlist'], + ['blacklist', 'denylist'], + ['grandfathered', 'legacy'], + ['sanity', 'quick'], + ['dummy', 'placeholder'], + // master/slave are loaded but rewriting requires more nuance — only + // flag, never autofix (could mean main/primary/controller; depends + // on the surrounding domain). 
+] + +const REPORT_ONLY = new Set(['master', 'slave']) +const REPORT_ONLY_TERMS = ['master', 'slave'] + +const BYPASS_RE = /inclusive-language:\s*external-api/ + +/** Build a regex matching any legacy stem with word boundaries. + * + * Stems are sorted alphabetically before being joined so the regex + * alternation has a deterministic, stable form. Two reasons: + * 1. The fleet ships a `sort-regex-alternations` rule that flags + * unsorted `(a|b|c)`-style alternations; this regex would trip + * its own sibling rule without the sort. + * 2. Regex engines treat `|` as "first match wins" when alternatives + * have shared prefixes — sorting keeps the precedence visible + * in source rather than depending on declaration order. + */ +function buildDetectorRegex() { + const stems = [ + ...SUBSTITUTIONS.map(([legacy]) => legacy), + ...REPORT_ONLY_TERMS, + ].sort() + return new RegExp(`\\b(${stems.join('|')})\\w*`, 'gi') +} + +const DETECTOR_RE = buildDetectorRegex() + +/** + * Replace a single hit `match` (e.g. `Whitelist`, `WHITELIST`, + * `whitelisted`, `whitelistEntry`) with the case-preserving form of + * the new stem. Returns undefined when there's no autofix-able + * substitution (master/slave). 
+ */ +function rewriteHit(match) { + const lower = match.toLowerCase() + for (const [legacy, replacement] of SUBSTITUTIONS) { + if (!lower.startsWith(legacy)) { + continue + } + const tail = match.slice(legacy.length) + const original = match.slice(0, legacy.length) + let rebuilt + if (original === original.toUpperCase()) { + rebuilt = replacement.toUpperCase() + } else if (original[0] === original[0].toUpperCase()) { + rebuilt = replacement[0].toUpperCase() + replacement.slice(1) + } else { + rebuilt = replacement + } + return rebuilt + tail + } + return undefined +} + +function findHits(text) { + const hits = [] + DETECTOR_RE.lastIndex = 0 + let m + while ((m = DETECTOR_RE.exec(text)) !== null) { + const stem = m[1].toLowerCase() + hits.push({ + start: m.index, + end: m.index + m[0].length, + match: m[0], + stem, + }) + } + return hits +} + +/** @type {import('eslint').Rule.RuleModule} */ +const rule = { + meta: { + type: 'suggestion', + docs: { + description: + 'Use inclusive language. Replace whitelist/blacklist/master/slave/grandfathered/sanity/dummy per the fleet substitution table.', + category: 'Stylistic Issues', + recommended: true, + }, + fixable: 'code', + messages: { + legacy: + '`{{match}}` — replace with the inclusive-language equivalent. See docs/references/inclusive-language.md.', + legacyMaster: + '`{{match}}` — replace with `main` (branch), `primary` / `controller` (process). Manual rewrite — context decides which fits.', + legacySlave: + '`{{match}}` — replace with `replica` / `worker` / `secondary` / `follower`. Manual rewrite — context decides which fits.', + }, + schema: [], + }, + + create(context) { + const sourceCode = context.getSourceCode + ? 
context.getSourceCode() + : context.sourceCode + + function hasBypassComment(node) { + const before = sourceCode.getCommentsBefore(node) + const after = sourceCode.getCommentsAfter(node) + for (const c of [...before, ...after]) { + if (BYPASS_RE.test(c.value)) { + return true + } + } + // Fall-back: scan the entire source line containing the node for + // a trailing bypass comment. AST-level "after" comments stop at + // the statement boundary, but a chained method call's string + // literal won't see a trailing comment on the same physical line. + const loc = node.loc + if (loc && loc.start.line === loc.end.line) { + const lineText = sourceCode.lines?.[loc.start.line - 1] + if (lineText && BYPASS_RE.test(lineText)) { + return true + } + } + return false + } + + function reportHit(node, hit, replaceFn) { + let messageId = 'legacy' + if (hit.stem === 'master') { + messageId = 'legacyMaster' + } else if (hit.stem === 'slave') { + messageId = 'legacySlave' + } + const isReportOnly = REPORT_ONLY.has(hit.stem) + const replacement = isReportOnly ? undefined : rewriteHit(hit.match) + if (!replacement) { + context.report({ node, messageId, data: { match: hit.match } }) + return + } + context.report({ + node, + messageId, + data: { match: hit.match }, + fix(fixer) { + return replaceFn(fixer, hit, replacement) + }, + }) + } + + function checkIdentifier(node) { + if (!node.name) { + return + } + const hits = findHits(node.name) + if (hits.length === 0) { + return + } + if (hasBypassComment(node)) { + return + } + // Identifiers can have multiple hits in compound names — + // process each and merge into a single rewrite. + let rebuilt = '' + let cursor = 0 + let mutated = false + let allReportOnly = true + for (const h of hits) { + rebuilt += node.name.slice(cursor, h.start) + const replacement = REPORT_ONLY.has(h.stem) + ? 
undefined + : rewriteHit(h.match) + if (replacement) { + rebuilt += replacement + mutated = true + allReportOnly = false + } else { + rebuilt += h.match + } + cursor = h.end + } + rebuilt += node.name.slice(cursor) + + if (!mutated) { + // All hits are report-only (master/slave) — emit one report + // for each. + for (const h of hits) { + let messageId = 'legacy' + if (h.stem === 'master') { + messageId = 'legacyMaster' + } else if (h.stem === 'slave') { + messageId = 'legacySlave' + } + context.report({ node, messageId, data: { match: h.match } }) + } + return + } + + // Emit one report per hit but a single combined fix. + const firstHit = hits[0] + let messageId = 'legacy' + if (firstHit.stem === 'master') { + messageId = 'legacyMaster' + } else if (firstHit.stem === 'slave') { + messageId = 'legacySlave' + } + context.report({ + node, + messageId, + data: { match: firstHit.match }, + fix(fixer) { + return fixer.replaceText(node, rebuilt) + }, + }) + } + + return { + Identifier: checkIdentifier, + + Literal(node) { + if (typeof node.value !== 'string') { + return + } + const hits = findHits(node.value) + if (hits.length === 0) { + return + } + if (hasBypassComment(node)) { + return + } + + let rebuilt = '' + let cursor = 0 + let mutated = false + for (const h of hits) { + rebuilt += node.value.slice(cursor, h.start) + const replacement = REPORT_ONLY.has(h.stem) + ? 
undefined + : rewriteHit(h.match) + if (replacement) { + rebuilt += replacement + mutated = true + } else { + rebuilt += h.match + } + cursor = h.end + } + rebuilt += node.value.slice(cursor) + + if (!mutated) { + for (const h of hits) { + let messageId = 'legacy' + if (h.stem === 'master') { + messageId = 'legacyMaster' + } else if (h.stem === 'slave') { + messageId = 'legacySlave' + } + context.report({ node, messageId, data: { match: h.match } }) + } + return + } + + const firstHit = hits[0] + let messageId = 'legacy' + if (firstHit.stem === 'master') { + messageId = 'legacyMaster' + } else if (firstHit.stem === 'slave') { + messageId = 'legacySlave' + } + context.report({ + node, + messageId, + data: { match: firstHit.match }, + fix(fixer) { + const raw = sourceCode.getText(node) + const quote = raw[0] + if (quote === '`') { + return fixer.replaceText(node, '`' + rebuilt + '`') + } + const escaped = rebuilt.replace( + new RegExp(`\\\\|${quote}`, 'g'), + ch => '\\' + ch, + ) + return fixer.replaceText(node, quote + escaped + quote) + }, + }) + }, + + Program() { + // Sweep comments — rewriting comment bodies is harmless even + // when literal text matches "legacy" examples, because the + // bypass comment + ignorePatterns handle external-API and + // vendored cases. + const comments = sourceCode.getAllComments() + for (const comment of comments) { + if (BYPASS_RE.test(comment.value)) { + continue + } + const hits = findHits(comment.value) + if (hits.length === 0) { + continue + } + + let rebuilt = '' + let cursor = 0 + let mutated = false + for (const h of hits) { + rebuilt += comment.value.slice(cursor, h.start) + const replacement = REPORT_ONLY.has(h.stem) + ? 
undefined + : rewriteHit(h.match) + if (replacement) { + rebuilt += replacement + mutated = true + } else { + rebuilt += h.match + } + cursor = h.end + } + rebuilt += comment.value.slice(cursor) + + if (!mutated) { + for (const h of hits) { + let messageId = 'legacy' + if (h.stem === 'master') { + messageId = 'legacyMaster' + } else if (h.stem === 'slave') { + messageId = 'legacySlave' + } + context.report({ + node: comment, + messageId, + data: { match: h.match }, + }) + } + continue + } + + const firstHit = hits[0] + let messageId = 'legacy' + if (firstHit.stem === 'master') { + messageId = 'legacyMaster' + } else if (firstHit.stem === 'slave') { + messageId = 'legacySlave' + } + context.report({ + node: comment, + messageId, + data: { match: firstHit.match }, + fix(fixer) { + const prefix = comment.type === 'Line' ? '//' : '/*' + const suffix = comment.type === 'Line' ? '' : '*/' + return fixer.replaceTextRange( + comment.range, + prefix + rebuilt + suffix, + ) + }, + }) + } + }, + } + }, +} + +export default rule diff --git a/.config/oxlint-plugin/rules/max-file-lines.mts b/.config/oxlint-plugin/rules/max-file-lines.mts new file mode 100644 index 0000000..ac025c6 --- /dev/null +++ b/.config/oxlint-plugin/rules/max-file-lines.mts @@ -0,0 +1,92 @@ +/** + * @fileoverview Per CLAUDE.md "File size" rule: + * + * Source files have a soft cap of 500 lines and a hard cap of 1000 + * lines. Past those thresholds, split the file along its natural + * seams. + * + * Two severities: + * - >500 lines: warning, with the message pointing at the splitting + * guidance in CLAUDE.md. + * - >1000 lines: error. + * + * No autofix — splitting requires judgment about where the natural + * seams are. The rule's job is to make the cap visible at every + * commit. + * + * Allowed exceptions: + * - Files marked at the top with a comment containing + * `max-file-lines: legitimate parser/state-machine/table` or + * `eslint-disable socket/max-file-lines`. 
Per CLAUDE.md the rare + * legitimate cases are parsers, state machines, and config tables; + * they should self-document with a one-line comment. + * - Generated artifacts — the rule trusts .config/oxlintrc.json's + * ignorePatterns to keep generated files out of scope. + */ + +const SOFT_CAP = 500 +const HARD_CAP = 1000 + +const BYPASS_RE = + /max-file-lines:\s*(legitimate|parser|state[- ]?machine|table)/i + +/** @type {import('eslint').Rule.RuleModule} */ +const rule = { + meta: { + type: 'suggestion', + docs: { + description: + 'Files have a soft cap of 500 lines (warn) and a hard cap of 1000 lines (error). Split along natural seams.', + category: 'Best Practices', + recommended: true, + }, + messages: { + soft: '{{lines}} lines — past the 500-line soft cap. Consider splitting along natural seams (one tool / domain / phase per file). See CLAUDE.md "File size".', + hard: '{{lines}} lines — past the 1000-line hard cap. Split this file. See CLAUDE.md "File size".', + }, + schema: [], + }, + + create(context) { + const sourceCode = context.getSourceCode + ? context.getSourceCode() + : context.sourceCode + + return { + Program(node) { + // Trust the parser's location info — `loc.end.line` is the + // 1-indexed line of the last token. Empty trailing lines are + // counted as part of the source per the line-counting + // convention CLAUDE.md uses. + const lines = node.loc.end.line + + if (lines <= SOFT_CAP) { + return + } + + // Bypass detection — scan leading comments only. A bypass + // comment buried 600 lines deep doesn't communicate intent at + // the file level. + const leadingComments = sourceCode + .getAllComments() + .filter(c => c.loc.start.line <= 5) + for (const c of leadingComments) { + if (BYPASS_RE.test(c.value)) { + return + } + } + + const messageId = lines > HARD_CAP ? 'hard' : 'soft' + // Anchor the report at line 1 — the file as a whole is the + // problem, not any specific node. 
+ context.report({ + loc: { line: 1, column: 0 }, + messageId, + data: { lines: String(lines) }, + }) + }, + } + }, +} + +export default rule diff --git a/.config/oxlint-plugin/rules/no-console-prefer-logger.mts b/.config/oxlint-plugin/rules/no-console-prefer-logger.mts new file mode 100644 index 0000000..43ce88e --- /dev/null +++ b/.config/oxlint-plugin/rules/no-console-prefer-logger.mts @@ -0,0 +1,123 @@ +/** + * @fileoverview Ban `console.log` / `console.error` / `console.warn` + * / `console.info` / `console.debug` / `console.trace`. The fleet uses + * `getDefaultLogger()` from `@socketsecurity/lib/logger` — those + * methods emit theme-aware coloring + canonical symbols. + * + * Autofix: rewrites `console.(...)` → `logger.(...)` + * AND inserts the missing pieces in one go: + * + * 1. `import { getDefaultLogger } from '@socketsecurity/lib/logger'` + * — appended after the last existing top-level import (or at the + * top of the file if there are none). + * 2. `const logger = getDefaultLogger()` — appended after the import + * block (so `logger` is hoisted at module scope). + * + * Each `console.(...)` call site emits its own fix + * independently. ESLint's autofixer dedupes overlapping inserts (the + * import line + hoist), so the visit order is irrelevant. 
+ */ + +import { appendImportFixes, summarizeImportTarget } from './_inject-import.mts' + +const CONSOLE_TO_LOGGER = { + debug: 'log', + error: 'fail', + info: 'info', + log: 'log', + trace: 'log', + warn: 'warn', +} + +const LOGGER_IMPORT_LINE = + "import { getDefaultLogger } from '@socketsecurity/lib/logger'" +const LOGGER_HOIST_LINE = 'const logger = getDefaultLogger()' + +/** @type {import('eslint').Rule.RuleModule} */ +const rule = { + meta: { + type: 'problem', + docs: { + description: + 'Ban console.* calls; use logger from @socketsecurity/lib/logger.', + category: 'Best Practices', + recommended: true, + }, + fixable: 'code', + messages: { + banned: + 'console.{{method}}() — use logger.{{loggerMethod}}() from @socketsecurity/lib/logger.', + }, + schema: [], + }, + + create(context) { + const sourceCode = context.getSourceCode + ? context.getSourceCode() + : context.sourceCode + + let summary + + function ensureSummary() { + if (summary) { + return summary + } + summary = summarizeImportTarget( + sourceCode.ast, + '@socketsecurity/lib/logger', + 'getDefaultLogger', + 'logger', + ) + return summary + } + + return { + MemberExpression(node) { + if ( + node.object.type !== 'Identifier' || + node.object.name !== 'console' || + node.property.type !== 'Identifier' + ) { + return + } + const method = node.property.name + const loggerMethod = CONSOLE_TO_LOGGER[method] + if (!loggerMethod) { + return + } + + // Only flag when console. is the callee of a call + // (skip e.g. `typeof console.log` or destructuring). 
+ const parent = node.parent + if ( + !parent || + parent.type !== 'CallExpression' || + parent.callee !== node + ) { + return + } + + const s = ensureSummary() + + context.report({ + node, + messageId: 'banned', + data: { method, loggerMethod }, + fix(fixer) { + return [ + fixer.replaceText(node, `logger.${loggerMethod}`), + ...appendImportFixes( + s, + fixer, + LOGGER_IMPORT_LINE, + LOGGER_HOIST_LINE, + ), + ] + }, + }) + }, + } + }, +} + +export default rule diff --git a/.config/oxlint-plugin/rules/no-default-export.mts b/.config/oxlint-plugin/rules/no-default-export.mts new file mode 100644 index 0000000..98ea18e --- /dev/null +++ b/.config/oxlint-plugin/rules/no-default-export.mts @@ -0,0 +1,108 @@ +/** + * @fileoverview Forbid `export default` — fleet convention is named + * exports only. Default exports lose the name at the import site + * (`import x from 'mod'` lets the caller rename freely), defeat + * grep / "find references" tools, and don't compose with re-exports + * (`export * from 'mod'` skips the default). + * + * Style signal that motivated the rule: across socket-sdk-js, + * socket-cli, socket-packageurl-js, socket-sdxgen, socket-lib, and + * socket-stuie, the named-vs-default ratio is essentially + * 100-to-1 — socket-lib has zero `export default` statements, the + * other repos have a handful of stragglers each. + * + * Autofix scope: + * - `export default function foo() {}` → `export function foo() {}` + * - `export default class Foo {}` → `export class Foo {}` + * - `export default ` (separate-declaration form) → + * `export { }` + * + * Skips (report-only, no fix): + * - `export default function () {}` / `export default class {}` — + * anonymous declarations, no canonical name to assign. + * - `export default ` where the expression isn't a bare + * identifier (e.g. `export default { foo: 1 }`, + * `export default makePlugin(...)`) — choosing a name requires + * human input. 
+ */ + +/** @type {import('eslint').Rule.RuleModule} */ +const rule = { + meta: { + type: 'suggestion', + docs: { + description: + 'Forbid `export default` — use named exports so the export name is stable across import sites.', + category: 'Stylistic Issues', + recommended: true, + }, + fixable: 'code', + messages: { + noDefaultExport: + 'Avoid `export default` — use a named export so the export name is stable across imports, greppable, and composable with `export * from`.', + noDefaultExportNoFix: + 'Avoid `export default` — the default-exported value is anonymous or a complex expression. Give it a name and switch to `export { }`.', + }, + schema: [], + }, + + create(context) { + const sourceCode = context.getSourceCode + ? context.getSourceCode() + : context.sourceCode + + return { + ExportDefaultDeclaration(node) { + const decl = node.declaration + if (!decl) { + return + } + + // `export default function name() {}` / + // `export default class Name {}` — drop the `default` keyword + // and emit the declaration as a named export. + if ( + (decl.type === 'FunctionDeclaration' || + decl.type === 'ClassDeclaration') && + decl.id && + decl.id.type === 'Identifier' + ) { + context.report({ + node, + messageId: 'noDefaultExport', + fix(fixer) { + const declText = sourceCode.getText(decl) + return fixer.replaceText(node, `export ${declText}`) + }, + }) + return + } + + // `export default someIdentifier` — rewrite to + // `export { someIdentifier }`. Only safe when the identifier + // is declared in the same module; we don't try to verify that + // here because the import side will fail loudly if not, and + // the autofix never strips a declaration. + if (decl.type === 'Identifier') { + context.report({ + node, + messageId: 'noDefaultExport', + fix(fixer) { + return fixer.replaceText(node, `export { ${decl.name} }`) + }, + }) + return + } + + // Anonymous declaration or complex expression — report without + // a fix; the human needs to choose a name. 
+ context.report({ + node, + messageId: 'noDefaultExportNoFix', + }) + }, + } + }, +} + +export default rule diff --git a/.config/oxlint-plugin/rules/no-dynamic-import-outside-bundle.mts b/.config/oxlint-plugin/rules/no-dynamic-import-outside-bundle.mts new file mode 100644 index 0000000..55fda97 --- /dev/null +++ b/.config/oxlint-plugin/rules/no-dynamic-import-outside-bundle.mts @@ -0,0 +1,77 @@ +/** + * @fileoverview Ban dynamic `import()` (ImportExpression) in code that + * isn't bundled. The fleet favors static ES6 imports — dynamic import + * is only meaningful when a bundler resolves it statically at build + * time. Scripts under `scripts/` run directly via `node`; nothing + * bundles them, so a dynamic import only adds a runtime async hop for + * no resolution win. + * + * Allowed paths: `src/**`, `.config/**` (bundler configs themselves + * may load tools dynamically via the bundler's API). + * + * No autofix: converting `await import('foo')` to `import 'foo'` + * requires moving the statement to the top of the file and removing + * `await`/destructuring — the bundler-aware AST rewrite is non-trivial + * to do safely. Reporting only. + */ + +import path from 'node:path' + +const DEFAULT_BUNDLED_ROOTS = ['src/', '.config/', 'packages/'] + +/** @type {import('eslint').Rule.RuleModule} */ +const rule = { + meta: { + type: 'problem', + docs: { + description: + 'Ban dynamic import() outside bundled trees (src/, .config/, packages/).', + category: 'Best Practices', + recommended: true, + }, + messages: { + dynamic: + 'Dynamic import() in {{file}} — favor a static `import` statement at the top of the file. Dynamic import is only valid in bundled code (src/, .config/, packages/). 
If lazy resolution is required, justify it explicitly.', + }, + schema: [ + { + type: 'object', + properties: { + bundledRoots: { + type: 'array', + items: { type: 'string' }, + description: + 'Path prefixes (relative to repo root) where dynamic import() is allowed.', + }, + }, + additionalProperties: false, + }, + ], + }, + + create(context) { + const options = context.options[0] || {} + const bundledRoots = options.bundledRoots || DEFAULT_BUNDLED_ROOTS + const filename = context.physicalFilename || context.filename + const cwd = context.cwd || process.cwd() + const relative = path.relative(cwd, filename).split(path.sep).join('/') + + const inBundled = bundledRoots.some(root => relative.startsWith(root)) + + if (inBundled) { + return {} + } + + return { + ImportExpression(node) { + context.report({ + node, + messageId: 'dynamic', + data: { file: relative }, + }) + }, + } + }, +} + +export default rule diff --git a/.config/oxlint-plugin/rules/no-fetch-prefer-http-request.mts b/.config/oxlint-plugin/rules/no-fetch-prefer-http-request.mts new file mode 100644 index 0000000..61d14a5 --- /dev/null +++ b/.config/oxlint-plugin/rules/no-fetch-prefer-http-request.mts @@ -0,0 +1,63 @@ +/** + * @fileoverview Per CLAUDE.md "HTTP — never `fetch()`. Use httpJson / + * httpText / httpRequest from @socketsecurity/lib/http-request." + * + * Reports any `fetch(...)` call (global fetch). Does NOT auto-fix + * because the right replacement (`httpJson` vs `httpText` vs + * `httpRequest`) depends on what the caller does with the response — + * a wrong autofix would silently change behavior. Reporting only. + * + * Allowed exceptions (skipped): + * - `globalThis.fetch` — explicit reference (often for monkey-patching + * in tests). + * - Method calls (`obj.fetch(...)`) — those aren't the global. 
+ */ + +/** @type {import('eslint').Rule.RuleModule} */ +const rule = { + meta: { + type: 'problem', + docs: { + description: + 'Use httpJson / httpText / httpRequest from @socketsecurity/lib/http-request instead of global fetch().', + category: 'Best Practices', + recommended: true, + }, + messages: { + banned: + 'global fetch() — use httpJson / httpText / httpRequest from @socketsecurity/lib/http-request. The right replacement depends on what you do with the response; the lib helpers ship consistent error shapes (HttpError) and JSON/text decoding.', + }, + schema: [], + }, + + create(context) { + return { + CallExpression(node) { + const callee = node.callee + // Only flag direct `fetch(...)` calls (Identifier callee). + if (callee.type !== 'Identifier' || callee.name !== 'fetch') { + return + } + + // Skip if `fetch` is locally shadowed by a parameter / declaration. + // Best-effort: check the scope chain. + const scope = context.getScope ? context.getScope() : undefined + if (scope) { + const variable = scope.references.find( + ref => ref.identifier === callee, + )?.resolved + if (variable && variable.scope.type !== 'global') { + return + } + } + + context.report({ + node, + messageId: 'banned', + }) + }, + } + }, +} + +export default rule diff --git a/.config/oxlint-plugin/rules/no-inline-logger.mts b/.config/oxlint-plugin/rules/no-inline-logger.mts new file mode 100644 index 0000000..afabd00 --- /dev/null +++ b/.config/oxlint-plugin/rules/no-inline-logger.mts @@ -0,0 +1,112 @@ +/** + * @fileoverview Ban inline `getDefaultLogger().(...)`. The + * logger must be hoisted at the top of the file: + * const logger = getDefaultLogger() + * ... + * logger.success('...') + * + * Inline `getDefaultLogger().success(...)` re-resolves the logger on + * every call and reads inconsistently. The hoisted form is the + * fleet-canonical pattern. + * + * Autofix: rewrites `getDefaultLogger().` → `logger.` + * AND inserts the missing pieces in one go: + * + * 1. 
`import { getDefaultLogger } from '@socketsecurity/lib/logger'` + * — appended after the last existing top-level import (or at the + * top of the file if there are none). + * 2. `const logger = getDefaultLogger()` — appended after the import + * block (so `logger` is hoisted at module scope). + * + * Each inline call site emits its own fix independently. ESLint's + * autofixer dedupes overlapping inserts, so multiple violations in the + * same file collapse the import + hoist into a single insertion. + */ + +import { appendImportFixes, summarizeImportTarget } from './_inject-import.mts' + +const LOGGER_IMPORT_LINE = + "import { getDefaultLogger } from '@socketsecurity/lib/logger'" +const LOGGER_HOIST_LINE = 'const logger = getDefaultLogger()' + +/** @type {import('eslint').Rule.RuleModule} */ +const rule = { + meta: { + type: 'problem', + docs: { + description: + 'Hoist getDefaultLogger() to a const at the top of the file; do not call it inline.', + category: 'Best Practices', + recommended: true, + }, + fixable: 'code', + messages: { + inline: + 'getDefaultLogger() must be hoisted: add `const logger = getDefaultLogger()` near the top of the file and use `logger.{{method}}(...)`.', + }, + schema: [], + }, + + create(context) { + const sourceCode = context.getSourceCode + ? context.getSourceCode() + : context.sourceCode + + let summary + + function ensureSummary() { + if (summary) { + return summary + } + summary = summarizeImportTarget( + sourceCode.ast, + '@socketsecurity/lib/logger', + 'getDefaultLogger', + 'logger', + ) + return summary + } + + return { + MemberExpression(node) { + // Match: getDefaultLogger(). 
+ if (node.property.type !== 'Identifier') { + return + } + const obj = node.object + if ( + obj.type !== 'CallExpression' || + obj.callee.type !== 'Identifier' || + obj.callee.name !== 'getDefaultLogger' || + obj.arguments.length !== 0 + ) { + return + } + + const s = ensureSummary() + + context.report({ + node, + messageId: 'inline', + data: { method: node.property.name }, + fix(fixer) { + // Replace `getDefaultLogger()` (the CallExpression) with + // `logger`. Leaves `.method(...)` intact, so the result is + // `logger.method(...)`. + return [ + fixer.replaceText(obj, 'logger'), + ...appendImportFixes( + s, + fixer, + LOGGER_IMPORT_LINE, + LOGGER_HOIST_LINE, + ), + ] + }, + }) + }, + } + }, +} + +export default rule diff --git a/.config/oxlint-plugin/rules/no-npx-dlx.mts b/.config/oxlint-plugin/rules/no-npx-dlx.mts new file mode 100644 index 0000000..dd754e1 --- /dev/null +++ b/.config/oxlint-plugin/rules/no-npx-dlx.mts @@ -0,0 +1,192 @@ +/* oxlint-disable socket/no-npx-dlx -- this file IS the rule definition; the banned commands are lookup-table data, not real usage. */ + +/** + * @fileoverview Per CLAUDE.md "Tooling" rule: + * + * 🚨 NEVER use `npx`, `pnpm dlx`, or `yarn dlx` — use + * `pnpm exec ` or `pnpm run