Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,10 @@ All notable changes to this project will be documented in this file. See [commit
* add `npm run bench` script and stale embeddings warning ([#604](https://github.com/optave/codegraph/pull/604))
* bump `commit-and-tag-version`, `tree-sitter-cli`, `web-tree-sitter`, `@commitlint/cli`, `@commitlint/config-conventional` ([#560](https://github.com/optave/codegraph/pull/560), [#561](https://github.com/optave/codegraph/pull/561), [#562](https://github.com/optave/codegraph/pull/562), [#563](https://github.com/optave/codegraph/pull/563), [#564](https://github.com/optave/codegraph/pull/564))

### Notes

* **constants:** `EXTENSIONS` and `IGNORE_DIRS` in the programmatic API are now `Set<string>` (changed during TypeScript migration). Both expose a `.toArray()` convenience method for consumers that need array semantics.

## [3.3.1](https://github.com/optave/codegraph/compare/v3.3.0...v3.3.1) (2026-03-20)

**Incremental rebuild accuracy and post-3.3.0 stabilization.** This patch fixes a critical edge gap in the file watcher's single-file rebuild path, where call edges were silently dropped during incremental rebuilds. It also aligns the native Rust engine's edge-builder kind filters with the JS engine for parity, plugs a WASM tree memory leak in the native engine's typeMap backfill, and restores query performance to pre-3.1.4 levels. Several post-reorganization import path issues are also corrected.
Expand Down
47 changes: 46 additions & 1 deletion src/db/connection.ts
Original file line number Diff line number Diff line change
@@ -1,13 +1,32 @@
import { execFileSync } from 'node:child_process';
import fs from 'node:fs';
import path from 'node:path';
import { fileURLToPath } from 'node:url';
import Database from 'better-sqlite3';
import { debug, warn } from '../infrastructure/logger.js';
import { DbError } from '../shared/errors.js';
import type { BetterSqlite3Database } from '../types.js';
import { Repository } from './repository/base.js';
import { SqliteRepository } from './repository/sqlite-repository.js';

/** Lazily cached version string from package.json ('' when it cannot be read). */
let _packageVersion: string | undefined;

/**
 * Returns this package's version, reading package.json at most once per process.
 *
 * @returns The `version` field of package.json, or `''` when the file cannot
 *   be located or parsed (e.g. in bundled/relocated builds).
 */
function getPackageVersion(): string {
  if (_packageVersion === undefined) {
    try {
      const hereDir = path.dirname(fileURLToPath(import.meta.url));
      const pkgJson = fs.readFileSync(path.join(hereDir, '..', '..', 'package.json'), 'utf-8');
      _packageVersion = (JSON.parse(pkgJson) as { version: string }).version;
    } catch {
      // package.json unavailable — treat the version as unknown, never throw.
      _packageVersion = '';
    }
  }
  return _packageVersion;
}

/** True once the version-mismatch warning has been emitted; limits it to once per process. */
let _versionWarned = false;

/** DB handle extended with the optional path of the advisory lock file backing it. */
export type LockedDatabase = BetterSqlite3Database & { __lockPath?: string };

Expand Down Expand Up @@ -60,6 +79,11 @@ export function _resetRepoRootCache(): void {
_cachedRepoRootCwd = undefined;
}

/**
 * Test-only helper: clears the once-per-process version-mismatch warning flag
 * so a subsequent open can emit the warning again.
 */
export function _resetVersionWarning(): void {
  _versionWarned = false;
}

function isProcessAlive(pid: number): boolean {
try {
process.kill(pid, 0);
Expand Down Expand Up @@ -190,12 +214,33 @@ export function openReadonlyOrFail(customPath?: string): BetterSqlite3Database {
{ file: dbPath },
);
}
return new (
const db = new (
Database as unknown as new (
path: string,
opts?: Record<string, unknown>,
) => BetterSqlite3Database
)(dbPath, { readonly: true });

// Warn once per process if the DB was built with a different codegraph version
if (!_versionWarned) {
try {
const row = db
.prepare<{ value: string }>('SELECT value FROM build_meta WHERE key = ?')
.get('codegraph_version');
const buildVersion = row?.value;
const currentVersion = getPackageVersion();
if (buildVersion && currentVersion && buildVersion !== currentVersion) {
warn(
`DB was built with codegraph v${buildVersion}, running v${currentVersion}. Consider: codegraph build --no-incremental`,
);
}
} catch {
// build_meta table may not exist in older DBs — silently ignore
}
_versionWarned = true;
}

return db;
}

/**
Expand Down
4 changes: 2 additions & 2 deletions src/domain/graph/builder/helpers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import fs from 'node:fs';
import path from 'node:path';
import type BetterSqlite3 from 'better-sqlite3';
import { purgeFilesData } from '../../../db/index.js';
import { warn } from '../../../infrastructure/logger.js';
import { debug, warn } from '../../../infrastructure/logger.js';
import { EXTENSIONS, IGNORE_DIRS } from '../../../shared/constants.js';
import type { BetterSqlite3Database, CodegraphConfig, PathAliases } from '../../../types.js';

Expand Down Expand Up @@ -149,7 +149,7 @@ export function loadPathAliases(rootDir: string): PathAliases {
}
break;
} catch (err: unknown) {
warn(`Failed to parse ${configName}: ${(err as Error).message}`);
debug(`Failed to parse ${configName}: ${(err as Error).message}`);
}
}
return aliases;
Expand Down
5 changes: 4 additions & 1 deletion src/domain/graph/builder/stages/build-edges.ts
Original file line number Diff line number Diff line change
Expand Up @@ -102,12 +102,15 @@ function buildImportEdges(
const { fileSymbols, barrelOnlyFiles, rootDir } = ctx;

for (const [relPath, symbols] of fileSymbols) {
if (barrelOnlyFiles.has(relPath)) continue;
const isBarrelOnly = barrelOnlyFiles.has(relPath);
const fileNodeRow = getNodeIdStmt.get(relPath, 'file', relPath, 0);
if (!fileNodeRow) continue;
const fileNodeId = fileNodeRow.id;

for (const imp of symbols.imports) {
// Barrel-only files: only emit reexport edges, skip regular imports
if (isBarrelOnly && !imp.reexport) continue;

const resolvedPath = getResolved(ctx, path.join(rootDir, relPath), imp.source);
const targetRow = getNodeIdStmt.get(resolvedPath, 'file', resolvedPath, 0);
if (!targetRow) continue;
Expand Down
6 changes: 6 additions & 0 deletions src/domain/graph/builder/stages/resolve-imports.ts
Original file line number Diff line number Diff line change
Expand Up @@ -46,13 +46,19 @@ export async function resolveImports(ctx: PipelineContext): Promise<void> {
JOIN nodes n1 ON e.source_id = n1.id
WHERE e.kind = 'reexports' AND n1.kind = 'file'`)
.all() as Array<{ file: string }>;
// Barrel-only files will have edges re-created by buildEdges; delete
// their outgoing edges first to prevent duplicates during incremental builds.
const deleteOutgoingEdges = db.prepare(
'DELETE FROM edges WHERE source_id IN (SELECT id FROM nodes WHERE file = ?)',
);
for (const { file: relPath } of barrelCandidates) {
if (fileSymbols.has(relPath)) continue;
const absPath = path.join(rootDir, relPath);
try {
const symbols = await parseFilesAuto([absPath], rootDir, engineOpts);
const fileSym = symbols.get(relPath);
if (fileSym) {
deleteOutgoingEdges.run(relPath);
fileSymbols.set(relPath, fileSym);
Comment on lines +51 to 62
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

P1 Edge deletion is not transactional with edge re-creation

deleteOutgoingEdges.run(relPath) commits immediately to the DB (outside any transaction), but the corresponding reexports edges aren't re-created until buildEdges runs its own db.transaction() later. If the process crashes or buildEdges throws between those two points, the barrel file's outgoing edges are gone from the DB — but because the reexports rows no longer exist, the file won't appear in barrelCandidates on the next incremental run either. The file silently disappears from the barrel graph until the user explicitly runs --no-incremental.

The cleanest fix is to push the deletion into the buildEdges transaction so delete and insert are atomic:

// In buildImportEdges, before inserting reexport edges for a barrel-only file:
if (isBarrelOnly) {
  db.prepare(
    'DELETE FROM edges WHERE source_id IN (SELECT id FROM nodes WHERE file = ?)',
  ).run(relPath);
}

If keeping the deletion in resolveImports is preferred, wrapping the loop body in a savepoint or a db.transaction() call would achieve the same atomicity guarantee.

ctx.barrelOnlyFiles.add(relPath);
const reexports = fileSym.imports.filter((imp: Import) => imp.reexport);
Expand Down
1 change: 1 addition & 0 deletions src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ export { sequenceData } from './features/sequence.js';
export { hotspotsData, moduleBoundariesData, structureData } from './features/structure.js';
export { triageData } from './features/triage.js';
export { loadConfig } from './infrastructure/config.js';
export type { ArrayCompatSet } from './shared/constants.js';
export { EXTENSIONS, IGNORE_DIRS } from './shared/constants.js';
export {
AnalysisError,
Expand Down
54 changes: 35 additions & 19 deletions src/shared/constants.ts
Original file line number Diff line number Diff line change
@@ -1,26 +1,42 @@
import path from 'node:path';
import { SUPPORTED_EXTENSIONS } from '../domain/parser.js';

export const IGNORE_DIRS: Set<string> = new Set([
'node_modules',
'.git',
'dist',
'build',
'.next',
'.nuxt',
'.svelte-kit',
'coverage',
'.codegraph',
'__pycache__',
'.tox',
'vendor',
'.venv',
'venv',
'env',
'.env',
]);
/**
 * A `Set` augmented with a `.toArray()` convenience method, for consumers
 * migrating from the pre-3.4 Array-based API (where `.includes()` /
 * `.indexOf()` worked). Set semantics (`has`, `size`, iteration) are unchanged.
 */
export interface ArrayCompatSet<T> extends Set<T> {
  /** Returns the Set's elements as a fresh array (insertion order). */
  toArray(): T[];
}

/**
 * Upgrades a Set to an ArrayCompatSet in place by attaching a `.toArray()`
 * method; the Set's contents are not copied.
 */
function withArrayCompat<T>(source: Set<T>): ArrayCompatSet<T> {
  const upgraded = source as ArrayCompatSet<T>;
  upgraded.toArray = (): T[] => Array.from(source);
  return upgraded;
}

// Directory names skipped during traversal: VCS metadata, dependency and
// build output, caches, and Python virtual environments.
const DEFAULT_IGNORED_DIR_NAMES = [
  'node_modules',
  '.git',
  'dist',
  'build',
  '.next',
  '.nuxt',
  '.svelte-kit',
  'coverage',
  '.codegraph',
  '__pycache__',
  '.tox',
  'vendor',
  '.venv',
  'venv',
  'env',
  '.env',
];

/** Directory names excluded from scanning, as a Set with `.toArray()` compat. */
export const IGNORE_DIRS: ArrayCompatSet<string> = withArrayCompat(
  new Set(DEFAULT_IGNORED_DIR_NAMES),
);

// Defect: the stale re-export alias (`export { SUPPORTED_EXTENSIONS as EXTENSIONS }`)
// was left alongside the new const, producing a duplicate declaration of
// `EXTENSIONS` — only the ArrayCompatSet form is kept.
/** Supported source-file extensions, as a Set with `.toArray()` compat. */
export const EXTENSIONS: ArrayCompatSet<string> = withArrayCompat(new Set(SUPPORTED_EXTENSIONS));

export function shouldIgnore(dirName: string): boolean {
return IGNORE_DIRS.has(dirName) || dirName.startsWith('.');
Expand Down
Loading