From 1babae7cc47c5c65d0f515bca5aaee7d73f9152d Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Sat, 14 Mar 2026 13:59:46 -0600 Subject: [PATCH 01/12] feat: add Repository abstraction with InMemoryRepository for testing pyramid (3.13) Introduce a Repository base class, InMemoryRepository (Maps-backed, no SQLite), and SqliteRepository (delegates to existing fn(db,...) functions). Includes fluent TestRepoBuilder fixture factory and 105 new tests (51 unit + 54 parity) verifying behavioral equivalence between both implementations. Existing code and tests remain unchanged. Impact: 139 functions changed, 6 affected --- src/db.js | 3 + src/db/repository/base.js | 201 ++++++++ src/db/repository/in-memory-repository.js | 551 ++++++++++++++++++++++ src/db/repository/index.js | 4 +- src/db/repository/sqlite-repository.js | 219 +++++++++ tests/helpers/fixtures.js | 142 ++++++ tests/unit/in-memory-repository.test.js | 541 +++++++++++++++++++++ tests/unit/repository-parity.test.js | 280 +++++++++++ 8 files changed, 1940 insertions(+), 1 deletion(-) create mode 100644 src/db/repository/base.js create mode 100644 src/db/repository/in-memory-repository.js create mode 100644 src/db/repository/sqlite-repository.js create mode 100644 tests/helpers/fixtures.js create mode 100644 tests/unit/in-memory-repository.test.js create mode 100644 tests/unit/repository-parity.test.js diff --git a/src/db.js b/src/db.js index f4de972d..7b67a9cf 100644 --- a/src/db.js +++ b/src/db.js @@ -52,9 +52,12 @@ export { hasCoChanges, hasDataflowTable, hasEmbeddings, + InMemoryRepository, iterateFunctionNodes, listFunctionNodes, purgeFileData, purgeFilesData, + Repository, + SqliteRepository, upsertCoChangeMeta, } from './db/repository/index.js'; diff --git a/src/db/repository/base.js b/src/db/repository/base.js new file mode 100644 index 00000000..d13ffcfa --- /dev/null +++ b/src/db/repository/base.js @@ -0,0 +1,201 @@ +/** + * Abstract Repository base class. 
/**
 * Abstract Repository base class.
 *
 * Defines the contract for all graph data access. Every query method throws
 * "not implemented" by default — concrete subclasses override what they
 * support. The thrown message names the missing method so capability gaps
 * are easy to diagnose. The optional table checks at the bottom return their
 * documented defaults (false / undefined) instead of throwing, so callers
 * can probe capabilities without try/catch.
 */
export class Repository {
  /**
   * Build a descriptive "not implemented" error.
   * The message deliberately starts with "not implemented" so existing
   * callers matching on that prefix keep working.
   * @param {string} method
   * @returns {Error}
   */
  #notImplemented(method) {
    return new Error(`not implemented: ${method}`);
  }

  // ── Node lookups ──────────────────────────────────────────────────
  /** @param {number} id @returns {object|undefined} */
  findNodeById(_id) {
    throw this.#notImplemented('findNodeById');
  }

  /** @param {string} file @returns {object[]} */
  findNodesByFile(_file) {
    throw this.#notImplemented('findNodesByFile');
  }

  /** @param {string} fileLike @returns {object[]} */
  findFileNodes(_fileLike) {
    throw this.#notImplemented('findFileNodes');
  }

  /** @param {string} namePattern @param {object} [opts] @returns {object[]} */
  findNodesWithFanIn(_namePattern, _opts) {
    throw this.#notImplemented('findNodesWithFanIn');
  }

  /** @returns {number} */
  countNodes() {
    throw this.#notImplemented('countNodes');
  }

  /** @returns {number} */
  countEdges() {
    throw this.#notImplemented('countEdges');
  }

  /** @returns {number} */
  countFiles() {
    throw this.#notImplemented('countFiles');
  }

  /** @param {string} name @param {string} kind @param {string} file @param {number} line @returns {number|undefined} */
  getNodeId(_name, _kind, _file, _line) {
    throw this.#notImplemented('getNodeId');
  }

  /** @param {string} name @param {string} file @param {number} line @returns {number|undefined} */
  getFunctionNodeId(_name, _file, _line) {
    throw this.#notImplemented('getFunctionNodeId');
  }

  /** @param {string} file @returns {{ id: number, name: string, kind: string, line: number }[]} */
  bulkNodeIdsByFile(_file) {
    throw this.#notImplemented('bulkNodeIdsByFile');
  }

  /** @param {number} parentId @returns {object[]} */
  findNodeChildren(_parentId) {
    throw this.#notImplemented('findNodeChildren');
  }

  /** @param {string} scopeName @param {object} [opts] @returns {object[]} */
  findNodesByScope(_scopeName, _opts) {
    throw this.#notImplemented('findNodesByScope');
  }

  /** @param {string} qualifiedName @param {object} [opts] @returns {object[]} */
  findNodeByQualifiedName(_qualifiedName, _opts) {
    throw this.#notImplemented('findNodeByQualifiedName');
  }

  /** @param {object} [opts] @returns {object[]} */
  listFunctionNodes(_opts) {
    throw this.#notImplemented('listFunctionNodes');
  }

  /** @param {object} [opts] @returns {IterableIterator} */
  iterateFunctionNodes(_opts) {
    throw this.#notImplemented('iterateFunctionNodes');
  }

  /** @param {object} [opts] @returns {object[]} */
  findNodesForTriage(_opts) {
    throw this.#notImplemented('findNodesForTriage');
  }

  // ── Edge queries ──────────────────────────────────────────────────
  /** @param {number} nodeId @returns {object[]} */
  findCallees(_nodeId) {
    throw this.#notImplemented('findCallees');
  }

  /** @param {number} nodeId @returns {object[]} */
  findCallers(_nodeId) {
    throw this.#notImplemented('findCallers');
  }

  /** @param {number} nodeId @returns {object[]} */
  findDistinctCallers(_nodeId) {
    throw this.#notImplemented('findDistinctCallers');
  }

  /** @param {number} nodeId @returns {object[]} */
  findAllOutgoingEdges(_nodeId) {
    throw this.#notImplemented('findAllOutgoingEdges');
  }

  /** @param {number} nodeId @returns {object[]} */
  findAllIncomingEdges(_nodeId) {
    throw this.#notImplemented('findAllIncomingEdges');
  }

  /** @param {number} nodeId @returns {string[]} */
  findCalleeNames(_nodeId) {
    throw this.#notImplemented('findCalleeNames');
  }

  /** @param {number} nodeId @returns {string[]} */
  findCallerNames(_nodeId) {
    throw this.#notImplemented('findCallerNames');
  }

  /** @param {number} nodeId @returns {{ file: string, edge_kind: string }[]} */
  findImportTargets(_nodeId) {
    throw this.#notImplemented('findImportTargets');
  }

  /** @param {number} nodeId @returns {{ file: string, edge_kind: string }[]} */
  findImportSources(_nodeId) {
    throw this.#notImplemented('findImportSources');
  }

  /** @param {number} nodeId @returns {object[]} */
  findImportDependents(_nodeId) {
    throw this.#notImplemented('findImportDependents');
  }

  /** @param {string} file @returns {Set} */
  findCrossFileCallTargets(_file) {
    throw this.#notImplemented('findCrossFileCallTargets');
  }

  /** @param {number} nodeId @param {string} file @returns {number} */
  countCrossFileCallers(_nodeId, _file) {
    throw this.#notImplemented('countCrossFileCallers');
  }

  /** @param {number} classNodeId @returns {Set} */
  getClassHierarchy(_classNodeId) {
    throw this.#notImplemented('getClassHierarchy');
  }

  /** @param {string} file @returns {{ caller_name: string, callee_name: string }[]} */
  findIntraFileCallEdges(_file) {
    throw this.#notImplemented('findIntraFileCallEdges');
  }

  // ── Graph-read queries ────────────────────────────────────────────
  /** @returns {{ id: number, name: string, kind: string, file: string }[]} */
  getCallableNodes() {
    throw this.#notImplemented('getCallableNodes');
  }

  /** @returns {{ source_id: number, target_id: number }[]} */
  getCallEdges() {
    throw this.#notImplemented('getCallEdges');
  }

  /** @returns {{ id: number, name: string, file: string }[]} */
  getFileNodesAll() {
    throw this.#notImplemented('getFileNodesAll');
  }

  /** @returns {{ source_id: number, target_id: number }[]} */
  getImportEdges() {
    throw this.#notImplemented('getImportEdges');
  }

  // ── Optional table checks (default: false/undefined) ──────────────
  // These return their defaults rather than throwing so callers can probe
  // a repository's capabilities safely; subclasses override as needed.
  /** @returns {boolean} */
  hasCfgTables() {
    return false;
  }

  /** @returns {boolean} */
  hasEmbeddings() {
    return false;
  }

  /** @returns {boolean} */
  hasDataflowTable() {
    return false;
  }

  /** @param {number} nodeId @returns {object|undefined} */
  getComplexityForNode(_nodeId) {
    return undefined;
  }
}
/**
 * Convert a SQL LIKE pattern to a RegExp (case-insensitive, matching
 * SQLite's default ASCII case folding for LIKE).
 * Supports `%` (any chars) and `_` (single char); `\` escapes a literal.
 * @param {string} pattern
 * @returns {RegExp}
 */
function likeToRegex(pattern) {
  let regex = '';
  for (let i = 0; i < pattern.length; i++) {
    const ch = pattern[i];
    if (ch === '\\' && i + 1 < pattern.length) {
      // Escaped literal — emit the next char, regex-escaped.
      regex += pattern[++i].replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
    } else if (ch === '%') {
      regex += '.*';
    } else if (ch === '_') {
      regex += '.';
    } else {
      regex += ch.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
    }
  }
  return new RegExp(`^${regex}$`, 'i');
}

/**
 * In-memory Repository implementation backed by Maps.
 * No SQLite dependency — suitable for fast unit tests.
 */
export class InMemoryRepository extends Repository {
  #nodes = new Map(); // id → node object
  #edges = new Map(); // id → edge object
  #complexity = new Map(); // node_id → complexity metrics
  #nextNodeId = 1;
  #nextEdgeId = 1;

  // ── Mutation (test setup only) ──────────────────────────────────

  /**
   * Add a node. Returns the auto-assigned id.
   * @param {object} attrs - { name, kind, file, line, end_line?, parent_id?, exported?, qualified_name?, scope?, visibility?, role? }
   * @returns {number}
   */
  addNode(attrs) {
    const id = this.#nextNodeId++;
    this.#nodes.set(id, {
      id,
      name: attrs.name,
      kind: attrs.kind,
      file: attrs.file,
      line: attrs.line,
      end_line: attrs.end_line ?? null,
      parent_id: attrs.parent_id ?? null,
      exported: attrs.exported ?? null,
      qualified_name: attrs.qualified_name ?? null,
      scope: attrs.scope ?? null,
      visibility: attrs.visibility ?? null,
      role: attrs.role ?? null,
    });
    return id;
  }

  /**
   * Add an edge. Returns the auto-assigned id.
   * @param {object} attrs - { source_id, target_id, kind, confidence?, dynamic? }
   * @returns {number}
   */
  addEdge(attrs) {
    const id = this.#nextEdgeId++;
    this.#edges.set(id, {
      id,
      source_id: attrs.source_id,
      target_id: attrs.target_id,
      kind: attrs.kind,
      confidence: attrs.confidence ?? null,
      dynamic: attrs.dynamic ?? 0,
    });
    return id;
  }

  /**
   * Add complexity metrics for a node.
   * @param {number} nodeId
   * @param {object} metrics - { cognitive, cyclomatic, max_nesting, maintainability_index?, halstead_volume? }
   */
  addComplexity(nodeId, metrics) {
    this.#complexity.set(nodeId, {
      cognitive: metrics.cognitive ?? 0,
      cyclomatic: metrics.cyclomatic ?? 0,
      max_nesting: metrics.max_nesting ?? 0,
      maintainability_index: metrics.maintainability_index ?? 0,
      halstead_volume: metrics.halstead_volume ?? 0,
    });
  }

  // ── Node lookups ────────────────────────────────────────────────

  findNodeById(id) {
    return this.#nodes.get(id) ?? undefined;
  }

  findNodesByFile(file) {
    return [...this.#nodes.values()]
      .filter((n) => n.file === file && n.kind !== 'file')
      .sort((a, b) => a.line - b.line);
  }

  findFileNodes(fileLike) {
    const re = likeToRegex(fileLike);
    return [...this.#nodes.values()].filter((n) => n.kind === 'file' && re.test(n.file));
  }

  findNodesWithFanIn(namePattern, opts = {}) {
    const re = likeToRegex(namePattern);
    let nodes = [...this.#nodes.values()].filter((n) => re.test(n.name));

    if (opts.kinds) {
      nodes = nodes.filter((n) => opts.kinds.includes(n.kind));
    }
    if (opts.file) {
      const fileRe = likeToRegex(`%${opts.file}%`);
      nodes = nodes.filter((n) => fileRe.test(n.file));
    }

    // Compute fan-in per node
    const fanInMap = this.#computeFanIn();
    return nodes.map((n) => ({ ...n, fan_in: fanInMap.get(n.id) ?? 0 }));
  }

  countNodes() {
    return this.#nodes.size;
  }

  countEdges() {
    return this.#edges.size;
  }

  countFiles() {
    const files = new Set();
    for (const n of this.#nodes.values()) {
      files.add(n.file);
    }
    return files.size;
  }

  getNodeId(name, kind, file, line) {
    for (const n of this.#nodes.values()) {
      if (n.name === name && n.kind === kind && n.file === file && n.line === line) {
        return n.id;
      }
    }
    return undefined;
  }

  getFunctionNodeId(name, file, line) {
    for (const n of this.#nodes.values()) {
      if (
        n.name === name &&
        (n.kind === 'function' || n.kind === 'method') &&
        n.file === file &&
        n.line === line
      ) {
        return n.id;
      }
    }
    return undefined;
  }

  bulkNodeIdsByFile(file) {
    return [...this.#nodes.values()]
      .filter((n) => n.file === file)
      .map((n) => ({ id: n.id, name: n.name, kind: n.kind, line: n.line }));
  }

  findNodeChildren(parentId) {
    return [...this.#nodes.values()]
      .filter((n) => n.parent_id === parentId)
      .sort((a, b) => a.line - b.line)
      .map((n) => ({
        name: n.name,
        kind: n.kind,
        line: n.line,
        end_line: n.end_line,
        qualified_name: n.qualified_name,
        scope: n.scope,
        visibility: n.visibility,
      }));
  }

  findNodesByScope(scopeName, opts = {}) {
    let nodes = [...this.#nodes.values()].filter((n) => n.scope === scopeName);

    if (opts.kind) {
      nodes = nodes.filter((n) => n.kind === opts.kind);
    }
    if (opts.file) {
      const fileRe = likeToRegex(`%${opts.file}%`);
      nodes = nodes.filter((n) => fileRe.test(n.file));
    }

    return nodes.sort((a, b) => a.file.localeCompare(b.file) || a.line - b.line);
  }

  findNodeByQualifiedName(qualifiedName, opts = {}) {
    let nodes = [...this.#nodes.values()].filter((n) => n.qualified_name === qualifiedName);

    if (opts.file) {
      const fileRe = likeToRegex(`%${opts.file}%`);
      nodes = nodes.filter((n) => fileRe.test(n.file));
    }

    return nodes.sort((a, b) => a.file.localeCompare(b.file) || a.line - b.line);
  }

  listFunctionNodes(opts = {}) {
    return [...this.#iterateFunctionNodesImpl(opts)];
  }

  *iterateFunctionNodes(opts = {}) {
    yield* this.#iterateFunctionNodesImpl(opts);
  }

  findNodesForTriage(opts = {}) {
    const kindsToUse = opts.kind ? [opts.kind] : ['function', 'method', 'class'];
    let nodes = [...this.#nodes.values()].filter((n) => kindsToUse.includes(n.kind));

    if (opts.noTests) {
      nodes = nodes.filter((n) => !n.file.includes('.test.') && !n.file.includes('.spec.'));
    }
    if (opts.file) {
      const fileRe = likeToRegex(`%${opts.file}%`);
      nodes = nodes.filter((n) => fileRe.test(n.file));
    }
    if (opts.role) {
      nodes = nodes.filter((n) => n.role === opts.role);
    }

    const fanInMap = this.#computeFanIn();
    return nodes
      .sort((a, b) => a.file.localeCompare(b.file) || a.line - b.line)
      .map((n) => {
        const cx = this.#complexity.get(n.id);
        return {
          id: n.id,
          name: n.name,
          kind: n.kind,
          file: n.file,
          line: n.line,
          end_line: n.end_line,
          role: n.role,
          fan_in: fanInMap.get(n.id) ?? 0,
          cognitive: cx?.cognitive ?? 0,
          mi: cx?.maintainability_index ?? 0,
          cyclomatic: cx?.cyclomatic ?? 0,
          max_nesting: cx?.max_nesting ?? 0,
          churn: 0, // no co-change data in-memory
        };
      });
  }

  // ── Edge queries ────────────────────────────────────────────────

  findCallees(nodeId) {
    const seen = new Set();
    const results = [];
    for (const e of this.#edges.values()) {
      if (e.source_id === nodeId && e.kind === 'calls' && !seen.has(e.target_id)) {
        seen.add(e.target_id);
        const n = this.#nodes.get(e.target_id);
        if (n)
          results.push({
            id: n.id,
            name: n.name,
            kind: n.kind,
            file: n.file,
            line: n.line,
            end_line: n.end_line,
          });
      }
    }
    return results;
  }

  findCallers(nodeId) {
    const results = [];
    for (const e of this.#edges.values()) {
      if (e.target_id === nodeId && e.kind === 'calls') {
        const n = this.#nodes.get(e.source_id);
        if (n) results.push({ id: n.id, name: n.name, kind: n.kind, file: n.file, line: n.line });
      }
    }
    return results;
  }

  findDistinctCallers(nodeId) {
    const seen = new Set();
    const results = [];
    for (const e of this.#edges.values()) {
      if (e.target_id === nodeId && e.kind === 'calls' && !seen.has(e.source_id)) {
        seen.add(e.source_id);
        const n = this.#nodes.get(e.source_id);
        if (n) results.push({ id: n.id, name: n.name, kind: n.kind, file: n.file, line: n.line });
      }
    }
    return results;
  }

  findAllOutgoingEdges(nodeId) {
    const results = [];
    for (const e of this.#edges.values()) {
      if (e.source_id === nodeId) {
        const n = this.#nodes.get(e.target_id);
        if (n)
          results.push({
            name: n.name,
            kind: n.kind,
            file: n.file,
            line: n.line,
            edge_kind: e.kind,
          });
      }
    }
    return results;
  }

  findAllIncomingEdges(nodeId) {
    const results = [];
    for (const e of this.#edges.values()) {
      if (e.target_id === nodeId) {
        const n = this.#nodes.get(e.source_id);
        if (n)
          results.push({
            name: n.name,
            kind: n.kind,
            file: n.file,
            line: n.line,
            edge_kind: e.kind,
          });
      }
    }
    return results;
  }

  findCalleeNames(nodeId) {
    const names = new Set();
    for (const e of this.#edges.values()) {
      if (e.source_id === nodeId && e.kind === 'calls') {
        const n = this.#nodes.get(e.target_id);
        if (n) names.add(n.name);
      }
    }
    return [...names].sort();
  }

  findCallerNames(nodeId) {
    const names = new Set();
    for (const e of this.#edges.values()) {
      if (e.target_id === nodeId && e.kind === 'calls') {
        const n = this.#nodes.get(e.source_id);
        if (n) names.add(n.name);
      }
    }
    return [...names].sort();
  }

  findImportTargets(nodeId) {
    const results = [];
    for (const e of this.#edges.values()) {
      if (e.source_id === nodeId && (e.kind === 'imports' || e.kind === 'imports-type')) {
        const n = this.#nodes.get(e.target_id);
        if (n) results.push({ file: n.file, edge_kind: e.kind });
      }
    }
    return results;
  }

  findImportSources(nodeId) {
    const results = [];
    for (const e of this.#edges.values()) {
      if (e.target_id === nodeId && (e.kind === 'imports' || e.kind === 'imports-type')) {
        const n = this.#nodes.get(e.source_id);
        if (n) results.push({ file: n.file, edge_kind: e.kind });
      }
    }
    return results;
  }

  findImportDependents(nodeId) {
    const results = [];
    for (const e of this.#edges.values()) {
      if (e.target_id === nodeId && (e.kind === 'imports' || e.kind === 'imports-type')) {
        const n = this.#nodes.get(e.source_id);
        if (n) results.push({ ...n });
      }
    }
    return results;
  }

  findCrossFileCallTargets(file) {
    const targets = new Set();
    for (const e of this.#edges.values()) {
      if (e.kind !== 'calls') continue;
      const caller = this.#nodes.get(e.source_id);
      const target = this.#nodes.get(e.target_id);
      if (caller && target && target.file === file && caller.file !== file) {
        targets.add(e.target_id);
      }
    }
    return targets;
  }

  countCrossFileCallers(nodeId, file) {
    let count = 0;
    for (const e of this.#edges.values()) {
      if (e.target_id === nodeId && e.kind === 'calls') {
        const caller = this.#nodes.get(e.source_id);
        if (caller && caller.file !== file) count++;
      }
    }
    return count;
  }

  getClassHierarchy(classNodeId) {
    // BFS over 'extends' edges; the Set doubles as the visited guard so
    // cycles in the hierarchy cannot loop forever.
    const ancestors = new Set();
    const queue = [classNodeId];
    while (queue.length > 0) {
      const current = queue.shift();
      for (const e of this.#edges.values()) {
        if (e.source_id === current && e.kind === 'extends') {
          const target = this.#nodes.get(e.target_id);
          if (target && !ancestors.has(target.id)) {
            ancestors.add(target.id);
            queue.push(target.id);
          }
        }
      }
    }
    return ancestors;
  }

  findIntraFileCallEdges(file) {
    // Collect intra-file call edges together with the caller's line so the
    // sort below is a plain O(n log n) numeric sort. (Previously the
    // comparator re-scanned all nodes per comparison and resolved the caller
    // by name, which was quadratic and ambiguous for duplicate names.)
    const rows = [];
    for (const e of this.#edges.values()) {
      if (e.kind !== 'calls') continue;
      const caller = this.#nodes.get(e.source_id);
      const callee = this.#nodes.get(e.target_id);
      if (caller && callee && caller.file === file && callee.file === file) {
        rows.push({ caller_name: caller.name, callee_name: callee.name, line: caller.line });
      }
    }
    rows.sort((a, b) => a.line - b.line);
    return rows.map(({ caller_name, callee_name }) => ({ caller_name, callee_name }));
  }

  // ── Graph-read queries ──────────────────────────────────────────

  getCallableNodes() {
    return [...this.#nodes.values()]
      .filter((n) => CORE_SYMBOL_KINDS.includes(n.kind))
      .map((n) => ({ id: n.id, name: n.name, kind: n.kind, file: n.file }));
  }

  getCallEdges() {
    return [...this.#edges.values()]
      .filter((e) => e.kind === 'calls')
      .map((e) => ({ source_id: e.source_id, target_id: e.target_id }));
  }

  getFileNodesAll() {
    return [...this.#nodes.values()]
      .filter((n) => n.kind === 'file')
      .map((n) => ({ id: n.id, name: n.name, file: n.file }));
  }

  getImportEdges() {
    return [...this.#edges.values()]
      .filter((e) => e.kind === 'imports' || e.kind === 'imports-type')
      .map((e) => ({ source_id: e.source_id, target_id: e.target_id }));
  }

  // ── Optional table checks ───────────────────────────────────────

  hasCfgTables() {
    return false;
  }

  hasEmbeddings() {
    return false;
  }

  hasDataflowTable() {
    return false;
  }

  getComplexityForNode(nodeId) {
    return this.#complexity.get(nodeId);
  }

  // ── Private helpers ─────────────────────────────────────────────

  /** Compute fan-in (incoming 'calls' edge count) for all nodes. */
  #computeFanIn() {
    const fanIn = new Map();
    for (const e of this.#edges.values()) {
      if (e.kind === 'calls') {
        fanIn.set(e.target_id, (fanIn.get(e.target_id) ?? 0) + 1);
      }
    }
    return fanIn;
  }

  /** Internal generator for function/method/class listing with filters. */
  *#iterateFunctionNodesImpl(opts = {}) {
    let nodes = [...this.#nodes.values()].filter((n) =>
      ['function', 'method', 'class'].includes(n.kind),
    );

    if (opts.file) {
      const fileRe = likeToRegex(`%${opts.file}%`);
      nodes = nodes.filter((n) => fileRe.test(n.file));
    }
    if (opts.pattern) {
      const patternRe = likeToRegex(`%${opts.pattern}%`);
      nodes = nodes.filter((n) => patternRe.test(n.name));
    }
    if (opts.noTests) {
      nodes = nodes.filter((n) => !n.file.includes('.test.') && !n.file.includes('.spec.'));
    }

    nodes.sort((a, b) => a.file.localeCompare(b.file) || a.line - b.line);
    for (const n of nodes) {
      yield {
        name: n.name,
        kind: n.kind,
        file: n.file,
        line: n.line,
        end_line: n.end_line,
        role: n.role,
      };
    }
  }
}
import { Repository } from './base.js';
import { hasCfgTables } from './cfg.js';
import { getComplexityForNode } from './complexity.js';
import { hasDataflowTable } from './dataflow.js';
import {
  countCrossFileCallers,
  findAllIncomingEdges,
  findAllOutgoingEdges,
  findCalleeNames,
  findCallees,
  findCallerNames,
  findCallers,
  findCrossFileCallTargets,
  findDistinctCallers,
  findImportDependents,
  findImportSources,
  findImportTargets,
  findIntraFileCallEdges,
  getClassHierarchy,
} from './edges.js';
import { hasEmbeddings } from './embeddings.js';
import { getCallableNodes, getCallEdges, getFileNodesAll, getImportEdges } from './graph-read.js';
import {
  bulkNodeIdsByFile,
  countEdges,
  countFiles,
  countNodes,
  findFileNodes,
  findNodeById,
  findNodeByQualifiedName,
  findNodeChildren,
  findNodesByFile,
  findNodesByScope,
  findNodesForTriage,
  findNodesWithFanIn,
  getFunctionNodeId,
  getNodeId,
  iterateFunctionNodes,
  listFunctionNodes,
} from './nodes.js';

/**
 * SqliteRepository — wraps existing `fn(db, ...)` repository functions
 * behind the Repository interface so callers can use `repo.method(...)`.
 *
 * Every method below is a 1:1 delegation: it forwards its arguments
 * unchanged to the free function of the same name, prepending the wrapped
 * db handle. No logic lives here — behavior (result shape, ordering,
 * error semantics) is defined entirely by the underlying functions, which
 * keeps this class trivially in parity with direct `fn(db, ...)` callers.
 */
export class SqliteRepository extends Repository {
  #db;

  /** @param {object} db - better-sqlite3 Database instance */
  constructor(db) {
    super();
    this.#db = db;
  }

  /** Expose the underlying db for code that still needs raw access. */
  get db() {
    return this.#db;
  }

  // ── Node lookups ────────────────────────────────────────────────
  // Delegates to ./nodes.js.

  findNodeById(id) {
    return findNodeById(this.#db, id);
  }

  findNodesByFile(file) {
    return findNodesByFile(this.#db, file);
  }

  findFileNodes(fileLike) {
    return findFileNodes(this.#db, fileLike);
  }

  findNodesWithFanIn(namePattern, opts) {
    return findNodesWithFanIn(this.#db, namePattern, opts);
  }

  countNodes() {
    return countNodes(this.#db);
  }

  countEdges() {
    return countEdges(this.#db);
  }

  countFiles() {
    return countFiles(this.#db);
  }

  getNodeId(name, kind, file, line) {
    return getNodeId(this.#db, name, kind, file, line);
  }

  getFunctionNodeId(name, file, line) {
    return getFunctionNodeId(this.#db, name, file, line);
  }

  bulkNodeIdsByFile(file) {
    return bulkNodeIdsByFile(this.#db, file);
  }

  findNodeChildren(parentId) {
    return findNodeChildren(this.#db, parentId);
  }

  findNodesByScope(scopeName, opts) {
    return findNodesByScope(this.#db, scopeName, opts);
  }

  findNodeByQualifiedName(qualifiedName, opts) {
    return findNodeByQualifiedName(this.#db, qualifiedName, opts);
  }

  listFunctionNodes(opts) {
    return listFunctionNodes(this.#db, opts);
  }

  iterateFunctionNodes(opts) {
    return iterateFunctionNodes(this.#db, opts);
  }

  findNodesForTriage(opts) {
    return findNodesForTriage(this.#db, opts);
  }

  // ── Edge queries ────────────────────────────────────────────────
  // Delegates to ./edges.js.

  findCallees(nodeId) {
    return findCallees(this.#db, nodeId);
  }

  findCallers(nodeId) {
    return findCallers(this.#db, nodeId);
  }

  findDistinctCallers(nodeId) {
    return findDistinctCallers(this.#db, nodeId);
  }

  findAllOutgoingEdges(nodeId) {
    return findAllOutgoingEdges(this.#db, nodeId);
  }

  findAllIncomingEdges(nodeId) {
    return findAllIncomingEdges(this.#db, nodeId);
  }

  findCalleeNames(nodeId) {
    return findCalleeNames(this.#db, nodeId);
  }

  findCallerNames(nodeId) {
    return findCallerNames(this.#db, nodeId);
  }

  findImportTargets(nodeId) {
    return findImportTargets(this.#db, nodeId);
  }

  findImportSources(nodeId) {
    return findImportSources(this.#db, nodeId);
  }

  findImportDependents(nodeId) {
    return findImportDependents(this.#db, nodeId);
  }

  findCrossFileCallTargets(file) {
    return findCrossFileCallTargets(this.#db, file);
  }

  countCrossFileCallers(nodeId, file) {
    return countCrossFileCallers(this.#db, nodeId, file);
  }

  getClassHierarchy(classNodeId) {
    return getClassHierarchy(this.#db, classNodeId);
  }

  findIntraFileCallEdges(file) {
    return findIntraFileCallEdges(this.#db, file);
  }

  // ── Graph-read queries ──────────────────────────────────────────
  // Delegates to ./graph-read.js.

  getCallableNodes() {
    return getCallableNodes(this.#db);
  }

  getCallEdges() {
    return getCallEdges(this.#db);
  }

  getFileNodesAll() {
    return getFileNodesAll(this.#db);
  }

  getImportEdges() {
    return getImportEdges(this.#db);
  }

  // ── Optional table checks ───────────────────────────────────────
  // Delegates to ./cfg.js, ./embeddings.js, ./dataflow.js, ./complexity.js.

  hasCfgTables() {
    return hasCfgTables(this.#db);
  }

  hasEmbeddings() {
    return hasEmbeddings(this.#db);
  }

  hasDataflowTable() {
    return hasDataflowTable(this.#db);
  }

  getComplexityForNode(nodeId) {
    return getComplexityForNode(this.#db, nodeId);
  }
}
import { InMemoryRepository } from '../../src/db/repository/in-memory-repository.js';

/**
 * Fluent builder for constructing test graphs quickly.
 *
 * Usage:
 *   const { repo, ids } = createTestRepo()
 *     .fn('authenticate', 'auth.js', 10)
 *     .fn('authMiddleware', 'middleware.js', 5)
 *     .calls('authMiddleware', 'authenticate')
 *     .build();
 */
class TestRepoBuilder {
  // All declarations are queued here and materialized in build(). Edges and
  // complexity reference nodes by name, so declaration order doesn't matter.
  #pending = { nodes: [], edges: [], complexity: [] };

  /**
   * Add a function node.
   * @param {string} name
   * @param {string} file
   * @param {number} line
   * @param {object} [extra] - Additional node attrs (role, end_line, scope, etc.)
   */
  fn(name, file, line, extra = {}) {
    return this.#addNode(name, 'function', file, line, extra);
  }

  /**
   * Add a method node.
   */
  method(name, file, line, extra = {}) {
    return this.#addNode(name, 'method', file, line, extra);
  }

  /**
   * Add a class node.
   */
  cls(name, file, line, extra = {}) {
    return this.#addNode(name, 'class', file, line, extra);
  }

  /**
   * Add a file node (name and file are both the path; line is 0).
   */
  file(filePath) {
    return this.#addNode(filePath, 'file', filePath, 0);
  }

  /**
   * Add an arbitrary node.
   */
  node(name, kind, file, line, extra = {}) {
    return this.#addNode(name, kind, file, line, extra);
  }

  /**
   * Add a 'calls' edge between two named nodes.
   */
  calls(sourceName, targetName) {
    this.#pending.edges.push({ source: sourceName, target: targetName, kind: 'calls' });
    return this;
  }

  /**
   * Add an 'imports' edge.
   */
  imports(sourceName, targetName) {
    this.#pending.edges.push({ source: sourceName, target: targetName, kind: 'imports' });
    return this;
  }

  /**
   * Add an 'extends' edge.
   */
  extends(sourceName, targetName) {
    this.#pending.edges.push({ source: sourceName, target: targetName, kind: 'extends' });
    return this;
  }

  /**
   * Add an edge of any kind.
   */
  edge(sourceName, targetName, kind) {
    this.#pending.edges.push({ source: sourceName, target: targetName, kind });
    return this;
  }

  /**
   * Add complexity metrics for a named node.
   */
  complexity(name, metrics) {
    this.#pending.complexity.push({ name, metrics });
    return this;
  }

  /**
   * Build the InMemoryRepository and return { repo, ids }.
   * `ids` maps node names to their auto-assigned IDs.
   * NOTE: if two nodes share a name, the later one wins in `ids`, and edges
   * referencing that name resolve to the later node.
   * @throws {Error} when an edge or complexity entry references an unknown name.
   */
  build() {
    const repo = new InMemoryRepository();
    const ids = new Map();

    // Add nodes first so edges/complexity can resolve names → ids.
    for (const n of this.#pending.nodes) {
      const id = repo.addNode(n);
      ids.set(n.name, id);
    }

    // Add edges
    for (const e of this.#pending.edges) {
      const sourceId = ids.get(e.source);
      const targetId = ids.get(e.target);
      if (sourceId == null) throw new Error(`Unknown source node: "${e.source}"`);
      if (targetId == null) throw new Error(`Unknown target node: "${e.target}"`);
      repo.addEdge({ source_id: sourceId, target_id: targetId, kind: e.kind });
    }

    // Add complexity
    for (const c of this.#pending.complexity) {
      const nodeId = ids.get(c.name);
      if (nodeId == null) throw new Error(`Unknown node for complexity: "${c.name}"`);
      repo.addComplexity(nodeId, c.metrics);
    }

    return { repo, ids };
  }

  // Queue a node declaration; edge/complexity resolution happens in build().
  // (A previously-kept #nameIndex map was written here but never read —
  // removed as dead state.)
  #addNode(name, kind, file, line, extra = {}) {
    this.#pending.nodes.push({ name, kind, file, line, ...extra });
    return this;
  }
}
+ * @returns {TestRepoBuilder} + */ +export function createTestRepo() { + return new TestRepoBuilder(); +} diff --git a/tests/unit/in-memory-repository.test.js b/tests/unit/in-memory-repository.test.js new file mode 100644 index 00000000..0ca30857 --- /dev/null +++ b/tests/unit/in-memory-repository.test.js @@ -0,0 +1,541 @@ +import { describe, expect, it } from 'vitest'; +import { InMemoryRepository } from '../../src/db/repository/in-memory-repository.js'; +import { createTestRepo } from '../helpers/fixtures.js'; + +describe('InMemoryRepository', () => { + // ── Test graph ────────────────────────────────────────────────────── + // foo (function, src/foo.js:1, core) ← called by bar, Baz + // bar (method, src/bar.js:10, utility) → calls foo + // Baz (class, src/baz.js:20, entry) → calls foo + // qux (interface, src/qux.js:30) + // testFn (function, tests/foo.test.js:1) + function makeRepo() { + return createTestRepo() + .fn('foo', 'src/foo.js', 1, { role: 'core' }) + .method('bar', 'src/bar.js', 10, { role: 'utility' }) + .cls('Baz', 'src/baz.js', 20, { role: 'entry' }) + .node('qux', 'interface', 'src/qux.js', 30) + .fn('testFn', 'tests/foo.test.js', 1) + .calls('bar', 'foo') + .calls('Baz', 'foo') + .complexity('foo', { cognitive: 5, cyclomatic: 3, max_nesting: 2 }) + .build(); + } + + describe('countNodes / countEdges / countFiles', () => { + it('counts correctly', () => { + const { repo } = makeRepo(); + expect(repo.countNodes()).toBe(5); + expect(repo.countEdges()).toBe(2); + expect(repo.countFiles()).toBe(5); + }); + }); + + describe('findNodeById', () => { + it('returns node by id', () => { + const { repo, ids } = makeRepo(); + const node = repo.findNodeById(ids.get('foo')); + expect(node).toBeDefined(); + expect(node.name).toBe('foo'); + expect(node.kind).toBe('function'); + }); + + it('returns undefined for missing id', () => { + const { repo } = makeRepo(); + expect(repo.findNodeById(9999)).toBeUndefined(); + }); + }); + + describe('findNodesByFile', () => 
{ + it('returns non-file nodes for a file, sorted by line', () => { + const { repo } = makeRepo(); + const nodes = repo.findNodesByFile('src/foo.js'); + expect(nodes.length).toBe(1); + expect(nodes[0].name).toBe('foo'); + }); + + it('excludes file-kind nodes', () => { + const { repo } = createTestRepo().file('src/app.js').fn('main', 'src/app.js', 1).build(); + const nodes = repo.findNodesByFile('src/app.js'); + expect(nodes.length).toBe(1); + expect(nodes[0].name).toBe('main'); + }); + }); + + describe('findFileNodes', () => { + it('finds file-kind nodes matching LIKE pattern', () => { + const { repo } = createTestRepo() + .file('src/app.js') + .file('src/utils.js') + .fn('main', 'src/app.js', 1) + .build(); + const nodes = repo.findFileNodes('%app%'); + expect(nodes.length).toBe(1); + expect(nodes[0].name).toBe('src/app.js'); + }); + }); + + describe('findNodesWithFanIn', () => { + it('returns nodes with fan-in count', () => { + const { repo } = makeRepo(); + const rows = repo.findNodesWithFanIn('%foo%'); + const foo = rows.find((r) => r.name === 'foo'); + expect(foo).toBeDefined(); + expect(foo.fan_in).toBe(2); + }); + + it('filters by kinds', () => { + const { repo } = makeRepo(); + const rows = repo.findNodesWithFanIn('%foo%', { kinds: ['method'] }); + expect(rows.length).toBe(0); + }); + + it('filters by file', () => { + const { repo } = makeRepo(); + const rows = repo.findNodesWithFanIn('%foo%', { file: 'src' }); + expect(rows.every((r) => r.file.includes('src'))).toBe(true); + }); + }); + + describe('getNodeId / getFunctionNodeId', () => { + it('getNodeId returns id for exact tuple match', () => { + const { repo, ids } = makeRepo(); + expect(repo.getNodeId('foo', 'function', 'src/foo.js', 1)).toBe(ids.get('foo')); + }); + + it('getNodeId returns undefined for no match', () => { + const { repo } = makeRepo(); + expect(repo.getNodeId('nope', 'function', 'x.js', 1)).toBeUndefined(); + }); + + it('getFunctionNodeId restricts to function/method', () => { + const 
{ repo, ids } = makeRepo(); + expect(repo.getFunctionNodeId('foo', 'src/foo.js', 1)).toBe(ids.get('foo')); + expect(repo.getFunctionNodeId('Baz', 'src/baz.js', 20)).toBeUndefined(); // class, not function + }); + }); + + describe('bulkNodeIdsByFile', () => { + it('returns all nodes in a file', () => { + const { repo } = makeRepo(); + const rows = repo.bulkNodeIdsByFile('src/foo.js'); + expect(rows.length).toBe(1); + expect(rows[0].name).toBe('foo'); + }); + }); + + describe('findNodeChildren', () => { + it('finds children by parent_id', () => { + const repo2 = new InMemoryRepository(); + const classId = repo2.addNode({ + name: 'MyClass', + kind: 'class', + file: 'src/cls.js', + line: 1, + }); + repo2.addNode({ + name: 'doThing', + kind: 'method', + file: 'src/cls.js', + line: 5, + parent_id: classId, + }); + const children = repo2.findNodeChildren(classId); + expect(children.length).toBe(1); + expect(children[0].name).toBe('doThing'); + }); + }); + + describe('findNodesByScope', () => { + it('filters by scope name', () => { + const repo = new InMemoryRepository(); + repo.addNode({ name: 'MyClass', kind: 'class', file: 'src/cls.js', line: 1 }); + repo.addNode({ + name: 'doThing', + kind: 'method', + file: 'src/cls.js', + line: 5, + scope: 'MyClass', + }); + repo.addNode({ + name: 'helper', + kind: 'function', + file: 'src/cls.js', + line: 20, + scope: 'MyClass', + }); + repo.addNode({ name: 'other', kind: 'function', file: 'src/other.js', line: 1 }); + + const scoped = repo.findNodesByScope('MyClass'); + expect(scoped.length).toBe(2); + }); + + it('filters by scope + kind', () => { + const repo = new InMemoryRepository(); + repo.addNode({ + name: 'doThing', + kind: 'method', + file: 'src/cls.js', + line: 5, + scope: 'MyClass', + }); + repo.addNode({ + name: 'helper', + kind: 'function', + file: 'src/cls.js', + line: 20, + scope: 'MyClass', + }); + + const methods = repo.findNodesByScope('MyClass', { kind: 'method' }); + expect(methods.length).toBe(1); + 
expect(methods[0].name).toBe('doThing'); + }); + }); + + describe('findNodeByQualifiedName', () => { + it('finds nodes by qualified name', () => { + const repo = new InMemoryRepository(); + repo.addNode({ + name: 'format', + kind: 'method', + file: 'src/a.js', + line: 10, + qualified_name: 'DateHelper.format', + }); + repo.addNode({ + name: 'format', + kind: 'method', + file: 'src/b.js', + line: 20, + qualified_name: 'DateHelper.format', + }); + + const nodes = repo.findNodeByQualifiedName('DateHelper.format'); + expect(nodes.length).toBe(2); + }); + + it('filters by file', () => { + const repo = new InMemoryRepository(); + repo.addNode({ + name: 'format', + kind: 'method', + file: 'src/a.js', + line: 10, + qualified_name: 'DateHelper.format', + }); + repo.addNode({ + name: 'format', + kind: 'method', + file: 'src/b.js', + line: 20, + qualified_name: 'DateHelper.format', + }); + + const nodes = repo.findNodeByQualifiedName('DateHelper.format', { file: 'a.js' }); + expect(nodes.length).toBe(1); + }); + }); + + describe('listFunctionNodes / iterateFunctionNodes', () => { + it('returns function/method/class nodes', () => { + const { repo } = makeRepo(); + const rows = repo.listFunctionNodes(); + expect(rows.length).toBe(4); // foo, bar, Baz, testFn + expect(rows.every((r) => ['function', 'method', 'class'].includes(r.kind))).toBe(true); + }); + + it('filters by file', () => { + const { repo } = makeRepo(); + const rows = repo.listFunctionNodes({ file: 'foo' }); + expect(rows.every((r) => r.file.includes('foo'))).toBe(true); + }); + + it('filters by pattern', () => { + const { repo } = makeRepo(); + const rows = repo.listFunctionNodes({ pattern: 'Baz' }); + expect(rows.length).toBe(1); + expect(rows[0].name).toBe('Baz'); + }); + + it('excludes test files when noTests is set', () => { + const { repo } = makeRepo(); + const rows = repo.listFunctionNodes({ noTests: true }); + expect(rows.every((r) => !r.file.includes('.test.'))).toBe(true); + expect(rows.length).toBe(3); 
+ }); + + it('orders by file, line', () => { + const { repo } = makeRepo(); + const rows = repo.listFunctionNodes(); + for (let i = 1; i < rows.length; i++) { + const prev = `${rows[i - 1].file}:${String(rows[i - 1].line).padStart(6, '0')}`; + const curr = `${rows[i].file}:${String(rows[i].line).padStart(6, '0')}`; + expect(prev <= curr).toBe(true); + } + }); + + it('iterateFunctionNodes returns an iterator', () => { + const { repo } = makeRepo(); + const rows = [...repo.iterateFunctionNodes()]; + expect(rows.length).toBe(4); + }); + + it('iterateFunctionNodes respects filters', () => { + const { repo } = makeRepo(); + const rows = [...repo.iterateFunctionNodes({ noTests: true })]; + expect(rows.length).toBe(3); + }); + }); + + describe('findNodesForTriage', () => { + it('returns nodes with triage signals', () => { + const { repo } = makeRepo(); + const rows = repo.findNodesForTriage(); + expect(rows.length).toBe(4); + const foo = rows.find((r) => r.name === 'foo'); + expect(foo.fan_in).toBe(2); + expect(foo.cognitive).toBe(5); + }); + + it('excludes test files when noTests is set', () => { + const { repo } = makeRepo(); + const rows = repo.findNodesForTriage({ noTests: true }); + expect(rows.every((r) => !r.file.includes('.test.'))).toBe(true); + }); + + it('filters by kind', () => { + const { repo } = makeRepo(); + const rows = repo.findNodesForTriage({ kind: 'class' }); + expect(rows.length).toBe(1); + expect(rows[0].name).toBe('Baz'); + }); + + it('filters by role', () => { + const { repo } = makeRepo(); + const rows = repo.findNodesForTriage({ role: 'core' }); + expect(rows.length).toBe(1); + expect(rows[0].name).toBe('foo'); + }); + }); + + // ── Edge queries ────────────────────────────────────────────────── + + describe('findCallees / findCallers', () => { + it('finds callees', () => { + const { repo, ids } = makeRepo(); + const callees = repo.findCallees(ids.get('bar')); + expect(callees.length).toBe(1); + expect(callees[0].name).toBe('foo'); + }); + + 
it('finds callers', () => { + const { repo, ids } = makeRepo(); + const callers = repo.findCallers(ids.get('foo')); + expect(callers.length).toBe(2); + const names = callers.map((c) => c.name).sort(); + expect(names).toEqual(['Baz', 'bar']); + }); + }); + + describe('findDistinctCallers', () => { + it('deduplicates callers', () => { + const { repo, ids } = createTestRepo() + .fn('target', 'src/t.js', 1) + .fn('caller', 'src/c.js', 1) + .calls('caller', 'target') + .calls('caller', 'target') // duplicate edge + .build(); + const callers = repo.findDistinctCallers(ids.get('target')); + expect(callers.length).toBe(1); + }); + }); + + describe('findAllOutgoingEdges / findAllIncomingEdges', () => { + it('returns all outgoing edges with edge_kind', () => { + const { repo, ids } = makeRepo(); + const edges = repo.findAllOutgoingEdges(ids.get('bar')); + expect(edges.length).toBe(1); + expect(edges[0].edge_kind).toBe('calls'); + expect(edges[0].name).toBe('foo'); + }); + + it('returns all incoming edges with edge_kind', () => { + const { repo, ids } = makeRepo(); + const edges = repo.findAllIncomingEdges(ids.get('foo')); + expect(edges.length).toBe(2); + expect(edges.every((e) => e.edge_kind === 'calls')).toBe(true); + }); + }); + + describe('findCalleeNames / findCallerNames', () => { + it('returns sorted callee names', () => { + const { repo, ids } = createTestRepo() + .fn('main', 'src/m.js', 1) + .fn('beta', 'src/b.js', 1) + .fn('alpha', 'src/a.js', 1) + .calls('main', 'beta') + .calls('main', 'alpha') + .build(); + expect(repo.findCalleeNames(ids.get('main'))).toEqual(['alpha', 'beta']); + }); + + it('returns sorted caller names', () => { + const { repo, ids } = makeRepo(); + expect(repo.findCallerNames(ids.get('foo'))).toEqual(['Baz', 'bar']); + }); + }); + + describe('import edge queries', () => { + it('finds import targets and sources', () => { + const { repo, ids } = createTestRepo() + .file('src/app.js') + .file('src/utils.js') + .imports('src/app.js', 
'src/utils.js') + .build(); + const targets = repo.findImportTargets(ids.get('src/app.js')); + expect(targets.length).toBe(1); + expect(targets[0].file).toBe('src/utils.js'); + + const sources = repo.findImportSources(ids.get('src/utils.js')); + expect(sources.length).toBe(1); + expect(sources[0].file).toBe('src/app.js'); + }); + + it('finds import dependents', () => { + const { repo, ids } = createTestRepo() + .file('src/app.js') + .file('src/utils.js') + .imports('src/app.js', 'src/utils.js') + .build(); + const deps = repo.findImportDependents(ids.get('src/utils.js')); + expect(deps.length).toBe(1); + expect(deps[0].name).toBe('src/app.js'); + }); + }); + + describe('findCrossFileCallTargets', () => { + it('returns set of IDs called from other files', () => { + const { repo, ids } = makeRepo(); + const targets = repo.findCrossFileCallTargets('src/foo.js'); + expect(targets.size).toBe(1); + expect(targets.has(ids.get('foo'))).toBe(true); + }); + }); + + describe('countCrossFileCallers', () => { + it('counts callers from different files', () => { + const { repo, ids } = makeRepo(); + expect(repo.countCrossFileCallers(ids.get('foo'), 'src/foo.js')).toBe(2); + }); + }); + + describe('getClassHierarchy', () => { + it('returns empty set for no extends', () => { + const { repo, ids } = makeRepo(); + expect(repo.getClassHierarchy(ids.get('Baz')).size).toBe(0); + }); + + it('resolves multi-level hierarchy', () => { + const { repo, ids } = createTestRepo() + .cls('Child', 'src/c.js', 1) + .cls('Parent', 'src/p.js', 1) + .cls('Grandparent', 'src/g.js', 1) + .extends('Child', 'Parent') + .extends('Parent', 'Grandparent') + .build(); + const ancestors = repo.getClassHierarchy(ids.get('Child')); + expect(ancestors.size).toBe(2); + expect(ancestors.has(ids.get('Parent'))).toBe(true); + expect(ancestors.has(ids.get('Grandparent'))).toBe(true); + }); + + it('handles diamond inheritance', () => { + const { repo, ids } = createTestRepo() + .cls('D', 'src/d.js', 1) + .cls('B1', 
'src/b1.js', 1) + .cls('B2', 'src/b2.js', 1) + .cls('Top', 'src/top.js', 1) + .extends('D', 'B1') + .extends('D', 'B2') + .extends('B1', 'Top') + .extends('B2', 'Top') + .build(); + const ancestors = repo.getClassHierarchy(ids.get('D')); + expect(ancestors.size).toBe(3); + }); + }); + + describe('findIntraFileCallEdges', () => { + it('returns intra-file call pairs', () => { + const { repo } = createTestRepo() + .fn('a', 'src/f.js', 1) + .fn('b', 'src/f.js', 10) + .fn('c', 'src/other.js', 1) + .calls('a', 'b') + .calls('a', 'c') + .build(); + const edges = repo.findIntraFileCallEdges('src/f.js'); + expect(edges.length).toBe(1); + expect(edges[0]).toEqual({ caller_name: 'a', callee_name: 'b' }); + }); + }); + + // ── Graph-read queries ──────────────────────────────────────────── + + describe('getCallableNodes', () => { + it('returns core symbol kind nodes', () => { + const { repo } = makeRepo(); + const nodes = repo.getCallableNodes(); + // foo, bar, Baz, qux, testFn — all are core kinds + expect(nodes.length).toBe(5); + }); + }); + + describe('getCallEdges / getImportEdges', () => { + it('returns call edges', () => { + const { repo } = makeRepo(); + expect(repo.getCallEdges().length).toBe(2); + }); + + it('returns import edges', () => { + const { repo } = createTestRepo().file('a.js').file('b.js').imports('a.js', 'b.js').build(); + expect(repo.getImportEdges().length).toBe(1); + }); + }); + + describe('getFileNodesAll', () => { + it('returns file-kind nodes', () => { + const { repo } = createTestRepo().file('src/a.js').fn('main', 'src/a.js', 1).build(); + const files = repo.getFileNodesAll(); + expect(files.length).toBe(1); + expect(files[0].name).toBe('src/a.js'); + }); + }); + + // ── Optional table checks ───────────────────────────────────────── + + describe('optional table stubs', () => { + it('returns false for hasCfgTables, hasEmbeddings, hasDataflowTable', () => { + const { repo } = makeRepo(); + expect(repo.hasCfgTables()).toBe(false); + 
expect(repo.hasEmbeddings()).toBe(false); + expect(repo.hasDataflowTable()).toBe(false); + }); + }); + + describe('getComplexityForNode', () => { + it('returns complexity metrics', () => { + const { repo, ids } = makeRepo(); + const cx = repo.getComplexityForNode(ids.get('foo')); + expect(cx.cognitive).toBe(5); + expect(cx.cyclomatic).toBe(3); + expect(cx.max_nesting).toBe(2); + }); + + it('returns undefined for nodes without complexity', () => { + const { repo, ids } = makeRepo(); + expect(repo.getComplexityForNode(ids.get('bar'))).toBeUndefined(); + }); + }); +}); diff --git a/tests/unit/repository-parity.test.js b/tests/unit/repository-parity.test.js new file mode 100644 index 00000000..7bea81da --- /dev/null +++ b/tests/unit/repository-parity.test.js @@ -0,0 +1,280 @@ +import Database from 'better-sqlite3'; +import { afterEach, beforeEach, describe, expect, it } from 'vitest'; +import { initSchema } from '../../src/db/migrations.js'; +import { InMemoryRepository } from '../../src/db/repository/in-memory-repository.js'; +import { SqliteRepository } from '../../src/db/repository/sqlite-repository.js'; + +/** + * Parity tests — run the same assertions against both SqliteRepository and + * InMemoryRepository to verify behavioral equivalence. 
+ */ + +function seedSqliteRepo() { + const db = new Database(':memory:'); + initSchema(db); + + const insertNode = db.prepare( + 'INSERT INTO nodes (name, kind, file, line, end_line, role, scope, qualified_name) VALUES (?, ?, ?, ?, ?, ?, ?, ?)', + ); + insertNode.run('foo', 'function', 'src/foo.js', 1, 15, 'core', null, 'foo'); + insertNode.run('bar', 'method', 'src/bar.js', 10, 30, 'utility', 'BarClass', 'BarClass.bar'); + insertNode.run('Baz', 'class', 'src/baz.js', 20, 50, 'entry', null, 'Baz'); + insertNode.run('qux', 'interface', 'src/qux.js', 30, 40, null, null, null); + insertNode.run('testFn', 'function', 'tests/foo.test.js', 1, 10, null, null, null); + + const fooId = db.prepare("SELECT id FROM nodes WHERE name = 'foo'").get().id; + const barId = db.prepare("SELECT id FROM nodes WHERE name = 'bar'").get().id; + const bazId = db.prepare("SELECT id FROM nodes WHERE name = 'Baz'").get().id; + + const insertEdge = db.prepare('INSERT INTO edges (source_id, target_id, kind) VALUES (?, ?, ?)'); + insertEdge.run(barId, fooId, 'calls'); + insertEdge.run(bazId, fooId, 'calls'); + insertEdge.run(bazId, barId, 'extends'); + + db.prepare( + 'INSERT INTO function_complexity (node_id, cognitive, cyclomatic, max_nesting, maintainability_index, halstead_volume) VALUES (?, ?, ?, ?, ?, ?)', + ).run(fooId, 5, 3, 2, 80, 100); + + return { repo: new SqliteRepository(db), ids: { foo: fooId, bar: barId, baz: bazId } }; +} + +function seedInMemoryRepo() { + const repo = new InMemoryRepository(); + const fooId = repo.addNode({ + name: 'foo', + kind: 'function', + file: 'src/foo.js', + line: 1, + end_line: 15, + role: 'core', + qualified_name: 'foo', + }); + const barId = repo.addNode({ + name: 'bar', + kind: 'method', + file: 'src/bar.js', + line: 10, + end_line: 30, + role: 'utility', + scope: 'BarClass', + qualified_name: 'BarClass.bar', + }); + const bazId = repo.addNode({ + name: 'Baz', + kind: 'class', + file: 'src/baz.js', + line: 20, + end_line: 50, + role: 'entry', + 
qualified_name: 'Baz', + }); + repo.addNode({ name: 'qux', kind: 'interface', file: 'src/qux.js', line: 30, end_line: 40 }); + repo.addNode({ + name: 'testFn', + kind: 'function', + file: 'tests/foo.test.js', + line: 1, + end_line: 10, + }); + + repo.addEdge({ source_id: barId, target_id: fooId, kind: 'calls' }); + repo.addEdge({ source_id: bazId, target_id: fooId, kind: 'calls' }); + repo.addEdge({ source_id: bazId, target_id: barId, kind: 'extends' }); + + repo.addComplexity(fooId, { + cognitive: 5, + cyclomatic: 3, + max_nesting: 2, + maintainability_index: 80, + halstead_volume: 100, + }); + + return { repo, ids: { foo: fooId, bar: barId, baz: bazId } }; +} + +describe.each([ + { label: 'SqliteRepository', seed: seedSqliteRepo }, + { label: 'InMemoryRepository', seed: seedInMemoryRepo }, +])('Repository parity: $label', ({ seed }) => { + let repo; + let ids; + let dbToClose; + + beforeEach(() => { + const result = seed(); + repo = result.repo; + ids = result.ids; + if (repo.db) dbToClose = repo.db; + }); + + afterEach(() => { + if (dbToClose) dbToClose.close(); + }); + + // ── Counts ────────────────────────────────────────────────────────── + + it('countNodes', () => { + expect(repo.countNodes()).toBe(5); + }); + + it('countEdges', () => { + expect(repo.countEdges()).toBe(3); // 2 calls + 1 extends + }); + + it('countFiles', () => { + expect(repo.countFiles()).toBe(5); + }); + + // ── Node lookups ──────────────────────────────────────────────────── + + it('findNodeById', () => { + const node = repo.findNodeById(ids.foo); + expect(node).toBeDefined(); + expect(node.name).toBe('foo'); + expect(node.kind).toBe('function'); + }); + + it('findNodeById returns undefined for missing', () => { + expect(repo.findNodeById(9999)).toBeUndefined(); + }); + + it('findNodesByFile', () => { + const nodes = repo.findNodesByFile('src/foo.js'); + expect(nodes.length).toBe(1); + expect(nodes[0].name).toBe('foo'); + }); + + it('getNodeId', () => { + expect(repo.getNodeId('foo', 
'function', 'src/foo.js', 1)).toBe(ids.foo); + expect(repo.getNodeId('nope', 'function', 'x.js', 1)).toBeUndefined(); + }); + + it('getFunctionNodeId', () => { + expect(repo.getFunctionNodeId('foo', 'src/foo.js', 1)).toBe(ids.foo); + // class is not function/method + expect(repo.getFunctionNodeId('Baz', 'src/baz.js', 20)).toBeUndefined(); + }); + + it('bulkNodeIdsByFile', () => { + const rows = repo.bulkNodeIdsByFile('src/foo.js'); + expect(rows.length).toBe(1); + expect(rows[0].name).toBe('foo'); + }); + + // ── Listing / iteration ───────────────────────────────────────────── + + it('listFunctionNodes returns fn/method/class', () => { + const rows = repo.listFunctionNodes(); + expect(rows.length).toBe(4); + expect(rows.every((r) => ['function', 'method', 'class'].includes(r.kind))).toBe(true); + }); + + it('listFunctionNodes excludes tests', () => { + const rows = repo.listFunctionNodes({ noTests: true }); + expect(rows.length).toBe(3); + expect(rows.every((r) => !r.file.includes('.test.'))).toBe(true); + }); + + it('listFunctionNodes filters by pattern', () => { + const rows = repo.listFunctionNodes({ pattern: 'Baz' }); + expect(rows.length).toBe(1); + }); + + it('iterateFunctionNodes matches listFunctionNodes', () => { + const list = repo.listFunctionNodes(); + const iter = [...repo.iterateFunctionNodes()]; + expect(iter.length).toBe(list.length); + }); + + // ── Edge queries ──────────────────────────────────────────────────── + + it('findCallees', () => { + const callees = repo.findCallees(ids.bar); + expect(callees.length).toBe(1); + expect(callees[0].name).toBe('foo'); + }); + + it('findCallers', () => { + const callers = repo.findCallers(ids.foo); + expect(callers.length).toBe(2); + const names = callers.map((c) => c.name).sort(); + expect(names).toEqual(['Baz', 'bar']); + }); + + it('findDistinctCallers', () => { + const callers = repo.findDistinctCallers(ids.foo); + expect(callers.length).toBe(2); + }); + + it('findCalleeNames / findCallerNames', () => { 
+ const calleeNames = repo.findCalleeNames(ids.bar); + expect(calleeNames).toEqual(['foo']); + + const callerNames = repo.findCallerNames(ids.foo); + expect(callerNames).toEqual(['Baz', 'bar']); + }); + + it('findAllOutgoingEdges', () => { + const edges = repo.findAllOutgoingEdges(ids.baz); + expect(edges.length).toBe(2); // calls foo, extends bar + const kinds = edges.map((e) => e.edge_kind).sort(); + expect(kinds).toEqual(['calls', 'extends']); + }); + + it('findAllIncomingEdges', () => { + const edges = repo.findAllIncomingEdges(ids.foo); + expect(edges.length).toBe(2); + expect(edges.every((e) => e.edge_kind === 'calls')).toBe(true); + }); + + it('findCrossFileCallTargets', () => { + const targets = repo.findCrossFileCallTargets('src/foo.js'); + expect(targets.size).toBe(1); + expect(targets.has(ids.foo)).toBe(true); + }); + + it('countCrossFileCallers', () => { + expect(repo.countCrossFileCallers(ids.foo, 'src/foo.js')).toBe(2); + }); + + it('findIntraFileCallEdges', () => { + // No intra-file calls in our graph (foo, bar, Baz are all different files) + const edges = repo.findIntraFileCallEdges('src/foo.js'); + expect(edges.length).toBe(0); + }); + + // ── Class hierarchy ───────────────────────────────────────────────── + + it('getClassHierarchy', () => { + const ancestors = repo.getClassHierarchy(ids.baz); + expect(ancestors.size).toBe(1); // Baz extends bar + expect(ancestors.has(ids.bar)).toBe(true); + }); + + // ── Graph-read queries ────────────────────────────────────────────── + + it('getCallEdges', () => { + const edges = repo.getCallEdges(); + expect(edges.length).toBe(2); // only 'calls' edges, not 'extends' + }); + + // ── Complexity ────────────────────────────────────────────────────── + + it('getComplexityForNode', () => { + const cx = repo.getComplexityForNode(ids.foo); + expect(cx).toBeDefined(); + expect(cx.cognitive).toBe(5); + expect(cx.cyclomatic).toBe(3); + expect(cx.max_nesting).toBe(2); + }); + + it('getComplexityForNode returns 
undefined for no data', () => { + expect(repo.getComplexityForNode(ids.bar)).toBeUndefined(); + }); + + // ── Optional table checks ────────────────────────────────────────── + + it('hasDataflowTable returns false', () => { + expect(repo.hasDataflowTable()).toBe(false); + }); +}); From 2c9c5fc407f98a3ae3fea474dd546fb13e738e2f Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Sat, 14 Mar 2026 14:36:30 -0600 Subject: [PATCH 02/12] fix: address PR review feedback from Greptile - Add kind/role validation to InMemoryRepository.findNodesForTriage matching SqliteRepository behavior (throws ConfigError) - Optimize findIntraFileCallEdges sort with pre-built lookup map instead of spreading nodes Map per comparison - Remove dead #nameIndex field from TestRepoBuilder - Add validation parity tests for invalid kind/role Impact: 5 functions changed, 0 affected --- src/db/repository/in-memory-repository.js | 25 +++++++++++++++-------- tests/helpers/fixtures.js | 2 -- tests/unit/in-memory-repository.test.js | 10 +++++++++ tests/unit/repository-parity.test.js | 10 +++++++++ 4 files changed, 37 insertions(+), 10 deletions(-) diff --git a/src/db/repository/in-memory-repository.js b/src/db/repository/in-memory-repository.js index 4df622fd..86233662 100644 --- a/src/db/repository/in-memory-repository.js +++ b/src/db/repository/in-memory-repository.js @@ -1,4 +1,5 @@ -import { CORE_SYMBOL_KINDS } from '../../kinds.js'; +import { ConfigError } from '../../errors.js'; +import { CORE_SYMBOL_KINDS, EVERY_SYMBOL_KIND, VALID_ROLES } from '../../kinds.js'; import { Repository } from './base.js'; /** @@ -223,6 +224,16 @@ export class InMemoryRepository extends Repository { } findNodesForTriage(opts = {}) { + if (opts.kind && !EVERY_SYMBOL_KIND.includes(opts.kind)) { + throw new ConfigError( + `Invalid kind: ${opts.kind} (expected one of ${EVERY_SYMBOL_KIND.join(', ')})`, + ); + } + if (opts.role && !VALID_ROLES.includes(opts.role)) { + throw new 
ConfigError( + `Invalid role: ${opts.role} (expected one of ${VALID_ROLES.join(', ')})`, + ); + } const kindsToUse = opts.kind ? [opts.kind] : ['function', 'method', 'class']; let nodes = [...this.#nodes.values()].filter((n) => kindsToUse.includes(n.kind)); @@ -450,14 +461,12 @@ export class InMemoryRepository extends Repository { results.push({ caller_name: caller.name, callee_name: callee.name }); } } + const lineByName = new Map(); + for (const n of this.#nodes.values()) { + if (n.file === file) lineByName.set(n.name, n.line); + } return results.sort((a, b) => { - const callerA = [...this.#nodes.values()].find( - (n) => n.name === a.caller_name && n.file === file, - ); - const callerB = [...this.#nodes.values()].find( - (n) => n.name === b.caller_name && n.file === file, - ); - return (callerA?.line ?? 0) - (callerB?.line ?? 0); + return (lineByName.get(a.caller_name) ?? 0) - (lineByName.get(b.caller_name) ?? 0); }); } diff --git a/tests/helpers/fixtures.js b/tests/helpers/fixtures.js index 389b4880..f98a9e87 100644 --- a/tests/helpers/fixtures.js +++ b/tests/helpers/fixtures.js @@ -12,7 +12,6 @@ import { InMemoryRepository } from '../../src/db/repository/in-memory-repository */ class TestRepoBuilder { #pending = { nodes: [], edges: [], complexity: [] }; - #nameIndex = new Map(); // name → { kind, file, line, ... } /** * Add a function node. 
@@ -128,7 +127,6 @@ class TestRepoBuilder { #addNode(name, kind, file, line, extra = {}) { this.#pending.nodes.push({ name, kind, file, line, ...extra }); - this.#nameIndex.set(name, { kind, file, line }); return this; } } diff --git a/tests/unit/in-memory-repository.test.js b/tests/unit/in-memory-repository.test.js index 0ca30857..dc0c3ff4 100644 --- a/tests/unit/in-memory-repository.test.js +++ b/tests/unit/in-memory-repository.test.js @@ -317,6 +317,16 @@ describe('InMemoryRepository', () => { expect(rows.length).toBe(1); expect(rows[0].name).toBe('foo'); }); + + it('throws on invalid kind', () => { + const { repo } = makeRepo(); + expect(() => repo.findNodesForTriage({ kind: 'bogus' })).toThrow('Invalid kind'); + }); + + it('throws on invalid role', () => { + const { repo } = makeRepo(); + expect(() => repo.findNodesForTriage({ role: 'supervisor' })).toThrow('Invalid role'); + }); }); // ── Edge queries ────────────────────────────────────────────────── diff --git a/tests/unit/repository-parity.test.js b/tests/unit/repository-parity.test.js index 7bea81da..6c2f64fe 100644 --- a/tests/unit/repository-parity.test.js +++ b/tests/unit/repository-parity.test.js @@ -277,4 +277,14 @@ describe.each([ it('hasDataflowTable returns false', () => { expect(repo.hasDataflowTable()).toBe(false); }); + + // ── Validation ──────────────────────────────────────────────────── + + it('findNodesForTriage throws on invalid kind', () => { + expect(() => repo.findNodesForTriage({ kind: 'bogus' })).toThrow('Invalid kind'); + }); + + it('findNodesForTriage throws on invalid role', () => { + expect(() => repo.findNodesForTriage({ role: 'supervisor' })).toThrow('Invalid role'); + }); }); From 1ac56afd862e1259f6f496ced67b1489f9538b68 Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Sun, 15 Mar 2026 00:55:04 -0600 Subject: [PATCH 03/12] fix: throw on duplicate node names in TestRepoBuilder.build() Impact: 2 functions changed, 0 affected --- 
tests/helpers/fixtures.js | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/helpers/fixtures.js b/tests/helpers/fixtures.js index f98a9e87..e8db712c 100644 --- a/tests/helpers/fixtures.js +++ b/tests/helpers/fixtures.js @@ -103,6 +103,9 @@ class TestRepoBuilder { // Add nodes for (const n of this.#pending.nodes) { const id = repo.addNode(n); + if (ids.has(n.name)) { + throw new Error(`Duplicate node name: "${n.name}" — use unique names or qualify with file path`); + } ids.set(n.name, id); } From 65230c6192302227641c251e5fdd5013b230b1b1 Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Sun, 15 Mar 2026 00:55:26 -0600 Subject: [PATCH 04/12] style: format duplicate-name error for biome line length Impact: 2 functions changed, 0 affected --- tests/helpers/fixtures.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/helpers/fixtures.js b/tests/helpers/fixtures.js index e8db712c..2242263b 100644 --- a/tests/helpers/fixtures.js +++ b/tests/helpers/fixtures.js @@ -104,7 +104,9 @@ class TestRepoBuilder { for (const n of this.#pending.nodes) { const id = repo.addNode(n); if (ids.has(n.name)) { - throw new Error(`Duplicate node name: "${n.name}" — use unique names or qualify with file path`); + throw new Error( + `Duplicate node name: "${n.name}" — use unique names or qualify with file path`, + ); } ids.set(n.name, id); } From 31ebeac9ee6f400267a6301885f463b73109fe10 Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Sun, 15 Mar 2026 02:09:29 -0600 Subject: [PATCH 05/12] fix: check for duplicate names before adding node to repo Move the duplicate-name guard before repo.addNode() so the repo is not mutated when a duplicate is detected. 
Impact: 2 functions changed, 0 affected --- tests/helpers/fixtures.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/helpers/fixtures.js b/tests/helpers/fixtures.js index 2242263b..44ed8305 100644 --- a/tests/helpers/fixtures.js +++ b/tests/helpers/fixtures.js @@ -102,12 +102,12 @@ class TestRepoBuilder { // Add nodes for (const n of this.#pending.nodes) { - const id = repo.addNode(n); if (ids.has(n.name)) { throw new Error( `Duplicate node name: "${n.name}" — use unique names or qualify with file path`, ); } + const id = repo.addNode(n); ids.set(n.name, id); } From 6ca47eaef8b48beedec986603b8f73cc00dce9b8 Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Sun, 15 Mar 2026 20:27:34 -0600 Subject: [PATCH 06/12] fix: escape LIKE special chars in InMemoryRepository file filters findNodesByScope, findNodeByQualifiedName, findNodesWithFanIn, listFunctionNodes, and findNodesForTriage all passed user-provided strings directly into likeToRegex without escaping % and _ chars. This caused divergence from SQLite which uses escapeLike + ESCAPE '\'. Also adds parity tests for findNodesByScope, findNodeByQualifiedName, findNodesWithFanIn, findFileNodes, findNodeChildren, import queries, getCallableNodes, getFileNodesAll, getImportEdges, and hasCfgTables. 
Impact: 7 functions changed, 5 affected --- src/db/repository/in-memory-repository.js | 22 ++- tests/unit/repository-parity.test.js | 175 ++++++++++++++++++++-- 2 files changed, 181 insertions(+), 16 deletions(-) diff --git a/src/db/repository/in-memory-repository.js b/src/db/repository/in-memory-repository.js index 86233662..94177a33 100644 --- a/src/db/repository/in-memory-repository.js +++ b/src/db/repository/in-memory-repository.js @@ -2,6 +2,16 @@ import { ConfigError } from '../../errors.js'; import { CORE_SYMBOL_KINDS, EVERY_SYMBOL_KIND, VALID_ROLES } from '../../kinds.js'; import { Repository } from './base.js'; +/** + * Escape LIKE special characters so they are treated as literals. + * Mirrors the `escapeLike` function in `nodes.js`. + * @param {string} s + * @returns {string} + */ +function escapeLike(s) { + return s.replace(/[%_\\]/g, '\\$&'); +} + /** * Convert a SQL LIKE pattern to a RegExp (case-insensitive). * Supports `%` (any chars) and `_` (single char). @@ -121,7 +131,7 @@ export class InMemoryRepository extends Repository { nodes = nodes.filter((n) => opts.kinds.includes(n.kind)); } if (opts.file) { - const fileRe = likeToRegex(`%${opts.file}%`); + const fileRe = likeToRegex(`%${escapeLike(opts.file)}%`); nodes = nodes.filter((n) => fileRe.test(n.file)); } @@ -197,7 +207,7 @@ export class InMemoryRepository extends Repository { nodes = nodes.filter((n) => n.kind === opts.kind); } if (opts.file) { - const fileRe = likeToRegex(`%${opts.file}%`); + const fileRe = likeToRegex(`%${escapeLike(opts.file)}%`); nodes = nodes.filter((n) => fileRe.test(n.file)); } @@ -208,7 +218,7 @@ export class InMemoryRepository extends Repository { let nodes = [...this.#nodes.values()].filter((n) => n.qualified_name === qualifiedName); if (opts.file) { - const fileRe = likeToRegex(`%${opts.file}%`); + const fileRe = likeToRegex(`%${escapeLike(opts.file)}%`); nodes = nodes.filter((n) => fileRe.test(n.file)); } @@ -241,7 +251,7 @@ export class InMemoryRepository extends 
Repository { nodes = nodes.filter((n) => !n.file.includes('.test.') && !n.file.includes('.spec.')); } if (opts.file) { - const fileRe = likeToRegex(`%${opts.file}%`); + const fileRe = likeToRegex(`%${escapeLike(opts.file)}%`); nodes = nodes.filter((n) => fileRe.test(n.file)); } if (opts.role) { @@ -534,11 +544,11 @@ export class InMemoryRepository extends Repository { ); if (opts.file) { - const fileRe = likeToRegex(`%${opts.file}%`); + const fileRe = likeToRegex(`%${escapeLike(opts.file)}%`); nodes = nodes.filter((n) => fileRe.test(n.file)); } if (opts.pattern) { - const patternRe = likeToRegex(`%${opts.pattern}%`); + const patternRe = likeToRegex(`%${escapeLike(opts.pattern)}%`); nodes = nodes.filter((n) => patternRe.test(n.name)); } if (opts.noTests) { diff --git a/tests/unit/repository-parity.test.js b/tests/unit/repository-parity.test.js index 6c2f64fe..4219082c 100644 --- a/tests/unit/repository-parity.test.js +++ b/tests/unit/repository-parity.test.js @@ -21,21 +21,41 @@ function seedSqliteRepo() { insertNode.run('Baz', 'class', 'src/baz.js', 20, 50, 'entry', null, 'Baz'); insertNode.run('qux', 'interface', 'src/qux.js', 30, 40, null, null, null); insertNode.run('testFn', 'function', 'tests/foo.test.js', 1, 10, null, null, null); + // File-kind node for findFileNodes / getFileNodesAll + db.prepare('INSERT INTO nodes (name, kind, file, line) VALUES (?, ?, ?, ?)').run( + 'src/foo.js', + 'file', + 'src/foo.js', + 0, + ); + // Child node with parent_id for findNodeChildren + db.prepare( + 'INSERT INTO nodes (name, kind, file, line, end_line, parent_id, scope) VALUES (?, ?, ?, ?, ?, ?, ?)', + ).run('bazMethod', 'method', 'src/baz.js', 25, 35, null, 'Baz'); const fooId = db.prepare("SELECT id FROM nodes WHERE name = 'foo'").get().id; const barId = db.prepare("SELECT id FROM nodes WHERE name = 'bar'").get().id; const bazId = db.prepare("SELECT id FROM nodes WHERE name = 'Baz'").get().id; + const fooFileId = db.prepare("SELECT id FROM nodes WHERE name = 
'src/foo.js'").get().id; + const bazMethodId = db.prepare("SELECT id FROM nodes WHERE name = 'bazMethod'").get().id; + // Set parent_id for bazMethod -> Baz + db.prepare('UPDATE nodes SET parent_id = ? WHERE id = ?').run(bazId, bazMethodId); const insertEdge = db.prepare('INSERT INTO edges (source_id, target_id, kind) VALUES (?, ?, ?)'); insertEdge.run(barId, fooId, 'calls'); insertEdge.run(bazId, fooId, 'calls'); insertEdge.run(bazId, barId, 'extends'); + // Import edge: foo.js imports bar.js + insertEdge.run(fooId, barId, 'imports'); db.prepare( 'INSERT INTO function_complexity (node_id, cognitive, cyclomatic, max_nesting, maintainability_index, halstead_volume) VALUES (?, ?, ?, ?, ?, ?)', ).run(fooId, 5, 3, 2, 80, 100); - return { repo: new SqliteRepository(db), ids: { foo: fooId, bar: barId, baz: bazId } }; + return { + repo: new SqliteRepository(db), + ids: { foo: fooId, bar: barId, baz: bazId, fooFile: fooFileId, bazMethod: bazMethodId }, + }; } function seedInMemoryRepo() { @@ -76,10 +96,24 @@ function seedInMemoryRepo() { line: 1, end_line: 10, }); + // File-kind node for findFileNodes / getFileNodesAll + const fooFileId = repo.addNode({ name: 'src/foo.js', kind: 'file', file: 'src/foo.js', line: 0 }); + // Child node with parent_id for findNodeChildren + const bazMethodId = repo.addNode({ + name: 'bazMethod', + kind: 'method', + file: 'src/baz.js', + line: 25, + end_line: 35, + parent_id: bazId, + scope: 'Baz', + }); repo.addEdge({ source_id: barId, target_id: fooId, kind: 'calls' }); repo.addEdge({ source_id: bazId, target_id: fooId, kind: 'calls' }); repo.addEdge({ source_id: bazId, target_id: barId, kind: 'extends' }); + // Import edge: foo imports bar + repo.addEdge({ source_id: fooId, target_id: barId, kind: 'imports' }); repo.addComplexity(fooId, { cognitive: 5, @@ -89,7 +123,7 @@ function seedInMemoryRepo() { halstead_volume: 100, }); - return { repo, ids: { foo: fooId, bar: barId, baz: bazId } }; + return { repo, ids: { foo: fooId, bar: barId, baz: 
bazId, fooFile: fooFileId, bazMethod: bazMethodId } }; } describe.each([ @@ -114,15 +148,15 @@ describe.each([ // ── Counts ────────────────────────────────────────────────────────── it('countNodes', () => { - expect(repo.countNodes()).toBe(5); + expect(repo.countNodes()).toBe(7); }); it('countEdges', () => { - expect(repo.countEdges()).toBe(3); // 2 calls + 1 extends + expect(repo.countEdges()).toBe(4); // 2 calls + 1 extends + 1 imports }); it('countFiles', () => { - expect(repo.countFiles()).toBe(5); + expect(repo.countFiles()).toBe(5); // foo.js, bar.js, baz.js, qux.js, foo.test.js }); // ── Node lookups ──────────────────────────────────────────────────── @@ -157,27 +191,27 @@ describe.each([ it('bulkNodeIdsByFile', () => { const rows = repo.bulkNodeIdsByFile('src/foo.js'); - expect(rows.length).toBe(1); - expect(rows[0].name).toBe('foo'); + expect(rows.length).toBe(2); // foo (function) + src/foo.js (file) + expect(rows.map((r) => r.name).sort()).toEqual(['foo', 'src/foo.js']); }); // ── Listing / iteration ───────────────────────────────────────────── it('listFunctionNodes returns fn/method/class', () => { const rows = repo.listFunctionNodes(); - expect(rows.length).toBe(4); + expect(rows.length).toBe(5); // foo, bar, Baz, testFn, bazMethod expect(rows.every((r) => ['function', 'method', 'class'].includes(r.kind))).toBe(true); }); it('listFunctionNodes excludes tests', () => { const rows = repo.listFunctionNodes({ noTests: true }); - expect(rows.length).toBe(3); + expect(rows.length).toBe(4); // foo, bar, Baz, bazMethod expect(rows.every((r) => !r.file.includes('.test.'))).toBe(true); }); it('listFunctionNodes filters by pattern', () => { const rows = repo.listFunctionNodes({ pattern: 'Baz' }); - expect(rows.length).toBe(1); + expect(rows.length).toBe(2); // Baz + bazMethod (LIKE is case-insensitive) }); it('iterateFunctionNodes matches listFunctionNodes', () => { @@ -287,4 +321,125 @@ describe.each([ it('findNodesForTriage throws on invalid role', () => { 
expect(() => repo.findNodesForTriage({ role: 'supervisor' })).toThrow('Invalid role'); }); + + // ── Scope / qualified-name lookups ────────────────────────────────── + + it('findNodesByScope', () => { + const nodes = repo.findNodesByScope('BarClass'); + expect(nodes.length).toBe(1); // only bar has scope 'BarClass' + expect(nodes[0].name).toBe('bar'); + }); + + it('findNodesByScope with file filter', () => { + const nodes = repo.findNodesByScope('BarClass', { file: 'bar.js' }); + expect(nodes.length).toBe(1); + expect(nodes[0].name).toBe('bar'); + }); + + it('findNodeByQualifiedName', () => { + const nodes = repo.findNodeByQualifiedName('BarClass.bar'); + expect(nodes.length).toBe(1); + expect(nodes[0].name).toBe('bar'); + }); + + it('findNodeByQualifiedName with file filter', () => { + const nodes = repo.findNodeByQualifiedName('BarClass.bar', { file: 'bar.js' }); + expect(nodes.length).toBe(1); + expect(nodes[0].name).toBe('bar'); + }); + + // ── LIKE escaping ───────────────────────────────────────────────── + + it('findNodesByScope treats _ as literal in file filter', () => { + // "_" is a LIKE wildcard — must not match arbitrary single chars + const nodes = repo.findNodesByScope('BarClass', { file: '_ar.js' }); + expect(nodes.length).toBe(0); + }); + + // ── Fan-in ───────────────────────────────────────────────────────── + + it('findNodesWithFanIn', () => { + const nodes = repo.findNodesWithFanIn('%foo%'); + expect(nodes.length).toBeGreaterThanOrEqual(1); + const foo = nodes.find((n) => n.name === 'foo'); + expect(foo).toBeDefined(); + expect(foo.fan_in).toBe(2); // bar calls foo, Baz calls foo + }); + + // ── File nodes ──────────────────────────────────────────────────── + + it('findFileNodes', () => { + const nodes = repo.findFileNodes('%foo%'); + expect(nodes.length).toBe(1); + expect(nodes[0].kind).toBe('file'); + expect(nodes[0].file).toBe('src/foo.js'); + }); + + it('getFileNodesAll', () => { + const nodes = repo.getFileNodesAll(); + 
expect(nodes.length).toBe(1); + expect(nodes[0].file).toBe('src/foo.js'); + }); + + // ── Node children ───────────────────────────────────────────────── + + it('findNodeChildren', () => { + const children = repo.findNodeChildren(ids.baz); + expect(children.length).toBe(1); + expect(children[0].name).toBe('bazMethod'); + expect(children[0].kind).toBe('method'); + }); + + it('findNodeChildren returns empty for leaf', () => { + expect(repo.findNodeChildren(ids.foo).length).toBe(0); + }); + + // ── Import queries ──────────────────────────────────────────────── + + it('findImportTargets', () => { + const targets = repo.findImportTargets(ids.foo); + expect(targets.length).toBe(1); + expect(targets[0].file).toBe('src/bar.js'); + expect(targets[0].edge_kind).toBe('imports'); + }); + + it('findImportSources', () => { + const sources = repo.findImportSources(ids.bar); + expect(sources.length).toBe(1); + expect(sources[0].file).toBe('src/foo.js'); + }); + + it('findImportDependents', () => { + const deps = repo.findImportDependents(ids.bar); + expect(deps.length).toBe(1); + expect(deps[0].name).toBe('foo'); + }); + + it('getImportEdges', () => { + const edges = repo.getImportEdges(); + expect(edges.length).toBe(1); + expect(edges[0].source_id).toBe(ids.foo); + expect(edges[0].target_id).toBe(ids.bar); + }); + + // ── Callable nodes ──────────────────────────────────────────────── + + it('getCallableNodes', () => { + const nodes = repo.getCallableNodes(); + // CORE_SYMBOL_KINDS includes function, method, class, interface, etc. + expect(nodes.length).toBeGreaterThanOrEqual(5); + expect(nodes.every((n) => n.id && n.name && n.kind && n.file)).toBe(true); + }); + + // ── Optional table checks ───────────────────────────────────────── + + it('hasCfgTables', () => { + // SQLite returns true (initSchema creates cfg tables), InMemory returns false. + // Both are correct — just verify the method exists and returns a boolean. 
+ expect(typeof repo.hasCfgTables()).toBe('boolean'); + }); + + it('hasEmbeddings returns false', () => { + expect(repo.hasEmbeddings()).toBe(false); + }); }); From e00fe4d0acb4ce4ec03d444b64e6651b6eab2573 Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Sun, 15 Mar 2026 20:28:12 -0600 Subject: [PATCH 07/12] style: format repository-parity test file --- tests/unit/repository-parity.test.js | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/unit/repository-parity.test.js b/tests/unit/repository-parity.test.js index 4219082c..73e2b22a 100644 --- a/tests/unit/repository-parity.test.js +++ b/tests/unit/repository-parity.test.js @@ -123,7 +123,10 @@ function seedInMemoryRepo() { halstead_volume: 100, }); - return { repo, ids: { foo: fooId, bar: barId, baz: bazId, fooFile: fooFileId, bazMethod: bazMethodId } }; + return { + repo, + ids: { foo: fooId, bar: barId, baz: bazId, fooFile: fooFileId, bazMethod: bazMethodId }, + }; } describe.each([ From 1f5b03b5ffdac4cd88c90c150401e31a0f007de1 Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Mon, 16 Mar 2026 00:12:53 -0600 Subject: [PATCH 08/12] fix: align InMemoryRepository noTests filter with SQLite excludeTests The in-memory noTests filter only checked .test. and .spec. patterns, missing __test__, __tests__, and .stories. that the SQLite query-builder excludes. Add the missing patterns to both findNodesForTriage and iterateFunctionNodes. Extend parity test fixtures with __tests__ and .stories. nodes so the divergence is caught automatically. 
Impact: 5 functions changed, 0 affected --- src/db/repository/in-memory-repository.js | 18 ++++++++++++++-- tests/helpers/fixtures.js | 3 +-- tests/unit/repository-parity.test.js | 26 +++++++++++++++++++---- 3 files changed, 39 insertions(+), 8 deletions(-) diff --git a/src/db/repository/in-memory-repository.js b/src/db/repository/in-memory-repository.js index 94177a33..526bd28d 100644 --- a/src/db/repository/in-memory-repository.js +++ b/src/db/repository/in-memory-repository.js @@ -248,7 +248,14 @@ export class InMemoryRepository extends Repository { let nodes = [...this.#nodes.values()].filter((n) => kindsToUse.includes(n.kind)); if (opts.noTests) { - nodes = nodes.filter((n) => !n.file.includes('.test.') && !n.file.includes('.spec.')); + nodes = nodes.filter( + (n) => + !n.file.includes('.test.') && + !n.file.includes('.spec.') && + !n.file.includes('__test__') && + !n.file.includes('__tests__') && + !n.file.includes('.stories.'), + ); } if (opts.file) { const fileRe = likeToRegex(`%${escapeLike(opts.file)}%`); @@ -552,7 +559,14 @@ export class InMemoryRepository extends Repository { nodes = nodes.filter((n) => patternRe.test(n.name)); } if (opts.noTests) { - nodes = nodes.filter((n) => !n.file.includes('.test.') && !n.file.includes('.spec.')); + nodes = nodes.filter( + (n) => + !n.file.includes('.test.') && + !n.file.includes('.spec.') && + !n.file.includes('__test__') && + !n.file.includes('__tests__') && + !n.file.includes('.stories.'), + ); } nodes.sort((a, b) => a.file.localeCompare(b.file) || a.line - b.line); diff --git a/tests/helpers/fixtures.js b/tests/helpers/fixtures.js index 44ed8305..e9225b12 100644 --- a/tests/helpers/fixtures.js +++ b/tests/helpers/fixtures.js @@ -107,8 +107,7 @@ class TestRepoBuilder { `Duplicate node name: "${n.name}" — use unique names or qualify with file path`, ); } - const id = repo.addNode(n); - ids.set(n.name, id); + ids.set(n.name, repo.addNode(n)); } // Add edges diff --git a/tests/unit/repository-parity.test.js 
b/tests/unit/repository-parity.test.js index 73e2b22a..33bf21ed 100644 --- a/tests/unit/repository-parity.test.js +++ b/tests/unit/repository-parity.test.js @@ -21,6 +21,8 @@ function seedSqliteRepo() { insertNode.run('Baz', 'class', 'src/baz.js', 20, 50, 'entry', null, 'Baz'); insertNode.run('qux', 'interface', 'src/qux.js', 30, 40, null, null, null); insertNode.run('testFn', 'function', 'tests/foo.test.js', 1, 10, null, null, null); + insertNode.run('jestFn', 'function', 'src/__tests__/helper.js', 1, 10, null, null, null); + insertNode.run('storyFn', 'function', 'src/Button.stories.js', 1, 10, null, null, null); // File-kind node for findFileNodes / getFileNodesAll db.prepare('INSERT INTO nodes (name, kind, file, line) VALUES (?, ?, ?, ?)').run( 'src/foo.js', @@ -96,6 +98,20 @@ function seedInMemoryRepo() { line: 1, end_line: 10, }); + repo.addNode({ + name: 'jestFn', + kind: 'function', + file: 'src/__tests__/helper.js', + line: 1, + end_line: 10, + }); + repo.addNode({ + name: 'storyFn', + kind: 'function', + file: 'src/Button.stories.js', + line: 1, + end_line: 10, + }); // File-kind node for findFileNodes / getFileNodesAll const fooFileId = repo.addNode({ name: 'src/foo.js', kind: 'file', file: 'src/foo.js', line: 0 }); // Child node with parent_id for findNodeChildren @@ -151,7 +167,7 @@ describe.each([ // ── Counts ────────────────────────────────────────────────────────── it('countNodes', () => { - expect(repo.countNodes()).toBe(7); + expect(repo.countNodes()).toBe(9); }); it('countEdges', () => { @@ -159,7 +175,7 @@ describe.each([ }); it('countFiles', () => { - expect(repo.countFiles()).toBe(5); // foo.js, bar.js, baz.js, qux.js, foo.test.js + expect(repo.countFiles()).toBe(7); // foo.js, bar.js, baz.js, qux.js, foo.test.js, __tests__/helper.js, Button.stories.js }); // ── Node lookups ──────────────────────────────────────────────────── @@ -202,7 +218,7 @@ describe.each([ it('listFunctionNodes returns fn/method/class', () => { const rows = 
repo.listFunctionNodes(); - expect(rows.length).toBe(5); // foo, bar, Baz, testFn, bazMethod + expect(rows.length).toBe(7); // foo, bar, Baz, testFn, bazMethod, jestFn, storyFn expect(rows.every((r) => ['function', 'method', 'class'].includes(r.kind))).toBe(true); }); @@ -210,6 +226,8 @@ describe.each([ const rows = repo.listFunctionNodes({ noTests: true }); expect(rows.length).toBe(4); // foo, bar, Baz, bazMethod expect(rows.every((r) => !r.file.includes('.test.'))).toBe(true); + expect(rows.every((r) => !r.file.includes('__tests__'))).toBe(true); + expect(rows.every((r) => !r.file.includes('.stories.'))).toBe(true); }); it('listFunctionNodes filters by pattern', () => { @@ -430,7 +448,7 @@ describe.each([ it('getCallableNodes', () => { const nodes = repo.getCallableNodes(); // CORE_SYMBOL_KINDS includes function, method, class, interface, etc. - expect(nodes.length).toBeGreaterThanOrEqual(5); + expect(nodes.length).toBeGreaterThanOrEqual(7); expect(nodes.every((n) => n.id && n.name && n.kind && n.file)).toBe(true); }); From d04ad2c3ebed1ebf14360f8e2a723e4355c6edc6 Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Mon, 16 Mar 2026 00:29:27 -0600 Subject: [PATCH 09/12] fix: escape LIKE wildcards in NodeQuery.fileFilter and nameLike fileFilter() and nameLike() in query-builder.js passed user input directly into LIKE patterns without escaping, so _ and % acted as wildcards instead of literals. This caused a parity gap where the SQLite findNodesWithFanIn treated _ as a wildcard but the in-memory version (which already called escapeLike) treated it as literal. Fix: add escapeLike() to query-builder.js (exported), use it in fileFilter() and nameLike() with ESCAPE '\', and deduplicate the copy in nodes.js by importing from query-builder.js. 
--- src/db/query-builder.js | 19 +++++++++++++------ src/db/repository/nodes.js | 7 +------ tests/unit/query-builder.test.js | 14 +++++++++++++- tests/unit/repository-parity.test.js | 6 ++++++ 4 files changed, 33 insertions(+), 13 deletions(-) diff --git a/src/db/query-builder.js b/src/db/query-builder.js index 2f43e754..12a15ecc 100644 --- a/src/db/query-builder.js +++ b/src/db/query-builder.js @@ -65,6 +65,13 @@ function validateEdgeKind(edgeKind) { } } +// ─── LIKE Escaping ────────────────────────────────────────────────── + +/** Escape LIKE wildcards in a literal string segment. */ +export function escapeLike(s) { + return s.replace(/[%_\\]/g, '\\$&'); +} + // ─── Standalone Helpers ────────────────────────────────────────────── /** @@ -164,11 +171,11 @@ export class NodeQuery { return this; } - /** WHERE n.file LIKE ? (no-op if falsy). */ + /** WHERE n.file LIKE ? (no-op if falsy). Escapes LIKE wildcards in the value. */ fileFilter(file) { if (!file) return this; - this.#conditions.push('n.file LIKE ?'); - this.#params.push(`%${file}%`); + this.#conditions.push("n.file LIKE ? ESCAPE '\\'"); + this.#params.push(`%${escapeLike(file)}%`); return this; } @@ -188,11 +195,11 @@ export class NodeQuery { return this; } - /** WHERE n.name LIKE ? (no-op if falsy). */ + /** WHERE n.name LIKE ? (no-op if falsy). Escapes LIKE wildcards in the value. */ nameLike(pattern) { if (!pattern) return this; - this.#conditions.push('n.name LIKE ?'); - this.#params.push(`%${pattern}%`); + this.#conditions.push("n.name LIKE ? 
ESCAPE '\\'"); + this.#params.push(`%${escapeLike(pattern)}%`); return this; } diff --git a/src/db/repository/nodes.js b/src/db/repository/nodes.js index 17876e13..cabc2ce3 100644 --- a/src/db/repository/nodes.js +++ b/src/db/repository/nodes.js @@ -1,6 +1,6 @@ import { ConfigError } from '../../errors.js'; import { EVERY_SYMBOL_KIND, VALID_ROLES } from '../../kinds.js'; -import { NodeQuery } from '../query-builder.js'; +import { escapeLike, NodeQuery } from '../query-builder.js'; import { cachedStmt } from './cached-stmt.js'; // ─── Query-builder based lookups (moved from src/db/repository.js) ───── @@ -250,11 +250,6 @@ export function findNodeChildren(db, parentId) { ).all(parentId); } -/** Escape LIKE wildcards in a literal string segment. */ -function escapeLike(s) { - return s.replace(/[%_\\]/g, '\\$&'); -} - /** * Find all nodes that belong to a given scope (by scope column). * Enables "all methods of class X" without traversing edges. diff --git a/tests/unit/query-builder.test.js b/tests/unit/query-builder.test.js index e53c5e70..7c47285d 100644 --- a/tests/unit/query-builder.test.js +++ b/tests/unit/query-builder.test.js @@ -157,6 +157,12 @@ describe('NodeQuery', () => { expect(rows.every((r) => r.file.includes('foo'))).toBe(true); }); + it('.fileFilter() escapes LIKE wildcards', () => { + // "_" should not match arbitrary single character + const rows = new NodeQuery().fileFilter('_oo').all(db); + expect(rows.length).toBe(0); + }); + it('.kindFilter() filters by exact kind', () => { const rows = new NodeQuery().kindFilter('class').all(db); expect(rows.length).toBe(1); @@ -174,6 +180,12 @@ describe('NodeQuery', () => { expect(rows.every((r) => r.name.toLowerCase().includes('ba'))).toBe(true); }); + it('.nameLike() escapes LIKE wildcards', () => { + // "_" should not match arbitrary single character + const rows = new NodeQuery().nameLike('_oo').all(db); + expect(rows.length).toBe(0); + }); + it('.where() adds raw condition', () => { const rows = new 
NodeQuery().where('n.line > ?', 5).all(db); expect(rows.every((r) => r.line > 5)).toBe(true); @@ -264,7 +276,7 @@ describe('NodeQuery', () => { .roleFilter('core') .build(); expect(sql).toContain('n.kind IN (?, ?)'); - expect(sql).toContain('n.file LIKE ?'); + expect(sql).toContain("n.file LIKE ? ESCAPE '\\'"); expect(sql).toContain('n.role = ?'); // All connected with AND const whereClause = sql.split('WHERE')[1]; diff --git a/tests/unit/repository-parity.test.js b/tests/unit/repository-parity.test.js index 33bf21ed..86d11610 100644 --- a/tests/unit/repository-parity.test.js +++ b/tests/unit/repository-parity.test.js @@ -387,6 +387,12 @@ describe.each([ expect(foo.fan_in).toBe(2); // bar calls foo, Baz calls foo }); + it('findNodesWithFanIn treats _ as literal in file filter', () => { + // "_" is a LIKE wildcard — must not match arbitrary single chars + const nodes = repo.findNodesWithFanIn('%foo%', { file: '_oo.js' }); + expect(nodes.length).toBe(0); + }); + // ── File nodes ──────────────────────────────────────────────────── it('findFileNodes', () => { From af036e60970a4eeebf2d1b0f8c1cf11513119c66 Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Mon, 16 Mar 2026 00:35:49 -0600 Subject: [PATCH 10/12] fix: import escapeLike from query-builder instead of duplicating it --- src/db/repository/in-memory-repository.js | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/src/db/repository/in-memory-repository.js b/src/db/repository/in-memory-repository.js index 526bd28d..91205c3c 100644 --- a/src/db/repository/in-memory-repository.js +++ b/src/db/repository/in-memory-repository.js @@ -1,17 +1,8 @@ import { ConfigError } from '../../errors.js'; import { CORE_SYMBOL_KINDS, EVERY_SYMBOL_KIND, VALID_ROLES } from '../../kinds.js'; +import { escapeLike } from '../query-builder.js'; import { Repository } from './base.js'; -/** - * Escape LIKE special characters so they are treated as literals. 
- * Mirrors the `escapeLike` function in `nodes.js`. - * @param {string} s - * @returns {string} - */ -function escapeLike(s) { - return s.replace(/[%_\\]/g, '\\$&'); -} - /** * Convert a SQL LIKE pattern to a RegExp (case-insensitive). * Supports `%` (any chars) and `_` (single char). From 7b604c79387a2d53f4f0ffe00ccc039f87cdacc4 Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Mon, 16 Mar 2026 01:23:30 -0600 Subject: [PATCH 11/12] fix(deps): pin native binaries to 3.1.4 in optionalDependencies Closes #454 --- package-lock.json | 48 +++++++++++++++++++++++------------------------ package.json | 12 ++++++------ 2 files changed, 30 insertions(+), 30 deletions(-) diff --git a/package-lock.json b/package-lock.json index f99ddf0e..dbc3c1e4 100644 --- a/package-lock.json +++ b/package-lock.json @@ -44,12 +44,12 @@ }, "optionalDependencies": { "@modelcontextprotocol/sdk": "^1.0.0", - "@optave/codegraph-darwin-arm64": "3.1.3", - "@optave/codegraph-darwin-x64": "3.1.3", - "@optave/codegraph-linux-arm64-gnu": "3.1.3", - "@optave/codegraph-linux-x64-gnu": "3.1.3", - "@optave/codegraph-linux-x64-musl": "3.1.3", - "@optave/codegraph-win32-x64-msvc": "3.1.3" + "@optave/codegraph-darwin-arm64": "3.1.4", + "@optave/codegraph-darwin-x64": "3.1.4", + "@optave/codegraph-linux-arm64-gnu": "3.1.4", + "@optave/codegraph-linux-x64-gnu": "3.1.4", + "@optave/codegraph-linux-x64-musl": "3.1.4", + "@optave/codegraph-win32-x64-msvc": "3.1.4" }, "peerDependencies": { "@huggingface/transformers": "^3.8.1" @@ -1244,9 +1244,9 @@ } }, "node_modules/@optave/codegraph-darwin-arm64": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/@optave/codegraph-darwin-arm64/-/codegraph-darwin-arm64-3.1.3.tgz", - "integrity": "sha512-9e+usj73OcpAUTtK1R7RGKk5qjvMRJRxDnN1QXhgwqsxDBZ1Pk2bOBZqXm8Dy7Q4NwsphJAMmHMOwxnnmNDWPg==", + "version": "3.1.4", + "resolved": 
"https://registry.npmjs.org/@optave/codegraph-darwin-arm64/-/codegraph-darwin-arm64-3.1.4.tgz", + "integrity": "sha512-WeDfyeLVFLJ8Oar0/L2mJZ3dTa2KPiLTytlWqnDbKn7PznS+mCBUhF7tav+W4zgvbiP8PcNAw24xb0ATLXcvzA==", "cpu": [ "arm64" ], @@ -1257,9 +1257,9 @@ ] }, "node_modules/@optave/codegraph-darwin-x64": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/@optave/codegraph-darwin-x64/-/codegraph-darwin-x64-3.1.3.tgz", - "integrity": "sha512-eBM5TFh8h4VVt6of+xG4JBSs4orl1H5Wi7RDbaUQfvfaui/BIm/KbZCqcVv5cQnjwYssx0yBKILEuxqCa+Y6Rw==", + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/@optave/codegraph-darwin-x64/-/codegraph-darwin-x64-3.1.4.tgz", + "integrity": "sha512-gqF3slTes3gZ5xv2n2f2o0TCs0R2HoY5pf8BW6BOV88H4e/mdV+oBZeCCci9x5WDJV2a+VSIXwre4d2CC2YG0A==", "cpu": [ "x64" ], @@ -1270,9 +1270,9 @@ ] }, "node_modules/@optave/codegraph-linux-arm64-gnu": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/@optave/codegraph-linux-arm64-gnu/-/codegraph-linux-arm64-gnu-3.1.3.tgz", - "integrity": "sha512-ceMXCR/W95ZTK5IMpEJMnxIdECRye+3qJVuY4bO79AtucKnrtox0BfTFWw1SFB6Iq5OyBWuLqfxcqIxlq2fNXg==", + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/@optave/codegraph-linux-arm64-gnu/-/codegraph-linux-arm64-gnu-3.1.4.tgz", + "integrity": "sha512-OfalkOvdWWuRodVKAxkaVJ/0qZSI8XxroSKx3jigKkovdPloX0L61dGDol8BxBa8T8tD9JFmyRo4csc3PSAYAg==", "cpu": [ "arm64" ], @@ -1283,9 +1283,9 @@ ] }, "node_modules/@optave/codegraph-linux-x64-gnu": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/@optave/codegraph-linux-x64-gnu/-/codegraph-linux-x64-gnu-3.1.3.tgz", - "integrity": "sha512-XpkP3WAdUFT437DMNvL+2izhlAvo8nrKrY+rng6cp3aH8KEuTT0IB/1a7yDkhRnsYgwFebuYv4w1pjr8YtQD2Q==", + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/@optave/codegraph-linux-x64-gnu/-/codegraph-linux-x64-gnu-3.1.4.tgz", + "integrity": "sha512-VmTiGlMku+tqpTG/xd8ux2fF1czSz2E9O2TfPAZGJngUXXBMkgbhGE2Ls8qD+qVeA5Nl480EAEyC0ebVgJ7/XQ==", "cpu": [ "x64" ], 
@@ -1296,9 +1296,9 @@ ] }, "node_modules/@optave/codegraph-linux-x64-musl": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/@optave/codegraph-linux-x64-musl/-/codegraph-linux-x64-musl-3.1.3.tgz", - "integrity": "sha512-CyF5o+V5gp39/2ig/xTmHFOn8FaXH5JJJVOrVvthGRBbeTq2FJI2PDDyYym4OOaNzr3kQzgSrfpQ1NiP+ToSzQ==", + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/@optave/codegraph-linux-x64-musl/-/codegraph-linux-x64-musl-3.1.4.tgz", + "integrity": "sha512-FWciyCQvPFYZhHU47x3Ug/19EEFi07BBaihfUg02p1cZkyN2DA0C/jwnDgdgbR+AxRLqRdFitcY5KG+jVYbKGw==", "cpu": [ "x64" ], @@ -1309,9 +1309,9 @@ ] }, "node_modules/@optave/codegraph-win32-x64-msvc": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/@optave/codegraph-win32-x64-msvc/-/codegraph-win32-x64-msvc-3.1.3.tgz", - "integrity": "sha512-Oy1J0nYLQ/VasDQ8KA0bNuLfzDHcdA/ITjQcZ8CBBRXMUWuusYflX/kNxb29Vo9fhj4PgoiYJcwdOKs++RgY3g==", + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/@optave/codegraph-win32-x64-msvc/-/codegraph-win32-x64-msvc-3.1.4.tgz", + "integrity": "sha512-cKCtk0bH2bJfMoPXAksX8VFbtSNuL3WGJKoVOP52YtHvsp/MMRwE/MZdbUf5ulhIL/oA/rMJQVXBnjpLc5oSNA==", "cpu": [ "x64" ], diff --git a/package.json b/package.json index af5a4fa2..fd29cee8 100644 --- a/package.json +++ b/package.json @@ -74,12 +74,12 @@ }, "optionalDependencies": { "@modelcontextprotocol/sdk": "^1.0.0", - "@optave/codegraph-darwin-arm64": "3.1.3", - "@optave/codegraph-darwin-x64": "3.1.3", - "@optave/codegraph-linux-arm64-gnu": "3.1.3", - "@optave/codegraph-linux-x64-gnu": "3.1.3", - "@optave/codegraph-linux-x64-musl": "3.1.3", - "@optave/codegraph-win32-x64-msvc": "3.1.3" + "@optave/codegraph-darwin-arm64": "3.1.4", + "@optave/codegraph-darwin-x64": "3.1.4", + "@optave/codegraph-linux-arm64-gnu": "3.1.4", + "@optave/codegraph-linux-x64-gnu": "3.1.4", + "@optave/codegraph-linux-x64-musl": "3.1.4", + "@optave/codegraph-win32-x64-msvc": "3.1.4" }, "devDependencies": { "@biomejs/biome": "^2.4.4", From 
36ffc3bbbe9f2b99d2af27c61383b2b1191bd42e Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Mon, 16 Mar 2026 01:23:40 -0600 Subject: [PATCH 12/12] docs: add dogfood report for v3.1.4 --- generated/dogfood/DOGFOOD_REPORT_v3.1.4.md | 407 +++++++++++++++++++++ 1 file changed, 407 insertions(+) create mode 100644 generated/dogfood/DOGFOOD_REPORT_v3.1.4.md diff --git a/generated/dogfood/DOGFOOD_REPORT_v3.1.4.md b/generated/dogfood/DOGFOOD_REPORT_v3.1.4.md new file mode 100644 index 00000000..24f7caf7 --- /dev/null +++ b/generated/dogfood/DOGFOOD_REPORT_v3.1.4.md @@ -0,0 +1,407 @@ +# Dogfooding Report: @optave/codegraph@3.1.4 + +**Date:** 2026-03-16 +**Platform:** Windows 11 Pro (10.0.26200), x86_64, Node v22.18.0 +**Native binary:** @optave/codegraph-win32-x64-msvc@3.1.4 +**Active engine:** native (v3.1.4) +**Target repo:** codegraph itself (398 files) + +--- + +## 1. Setup & Installation + +| Step | Result | +|------|--------| +| `npm install @optave/codegraph@3.1.4` | OK — 144 packages, 0 vulnerabilities | +| `npx codegraph --version` | `3.1.4` | +| Native binary package | `@optave/codegraph-win32-x64-msvc` v3.1.4 (npm package.json) | +| `npx codegraph info` | `Native version: 3.1.4 (binary reports 3.1.3 — stale)` | +| Active engine | `native (v3.1.4)` | + +**Note:** The `info` output shows "binary reports 3.1.3 — stale" because the Rust-compiled version string is baked at compile time and hasn't been bumped. The npm package version IS 3.1.4 and the engine loads correctly. This is cosmetic. + +**Issue found:** `optionalDependencies` in the published `package.json` pins native binaries to `3.1.3`, not `3.1.4`. npm resolves the correct version because 3.1.4 exists on the registry, but the pin is wrong. Filed as #454. + +--- + +## 2. 
Cold Start (Pre-Build) + +All commands tested without a graph database present: + +| Command | Result | Notes | +|---------|--------|-------| +| `query` | PASS | `codegraph [DB_ERROR]: No codegraph database found...` | +| `stats` | PASS | Same graceful DB_ERROR | +| `deps` | PASS | Same | +| `where` | PASS | Same | +| `map` | PASS | Same | +| `cycles` | PASS | Same | +| `impact` | PASS | Same | +| `context` | PASS | Same | +| `fn-impact` | PASS | Same | +| `diff-impact` | PASS | Same | +| `structure` | PASS | Same | +| `export` | PASS | Same | +| `search` | PASS | Same | +| `embed` | PASS | Same | +| `triage` | PASS | Same | +| `audit` | PASS | Same | +| `roles` | PASS | Same | +| `communities` | PASS | Same | +| `complexity` | PASS | Same | +| `models` | PASS | Lists 7 embedding models | +| `registry list` | PASS | Shows registered repos | +| `info` | PASS | Shows diagnostics | + +All commands fail gracefully with a helpful message pointing to `codegraph build`. + +**Commands not found in v3.1.4 CLI:** +- `fn` — use `query` instead +- `explain` — use `audit` or `context` instead +- `hotspots` — use `triage --level file` instead + +These are listed in the dogfood skill template but were removed or renamed in the vertical slice refactoring (v3.1.4). + +--- + +## 3. Full Command Sweep + +Build: `npx codegraph build ` — 398 files, 5323 nodes, 11466 edges, native engine. + +| Command | Status | Notes | +|---------|--------|-------| +| `query buildGraph` | PASS | Shows 17 callees | +| `query buildGraph --json` | PASS | Valid JSON | +| `query buildGraph --depth 1` | PASS | Depth-limited | +| `query nonexistent_symbol_xyz` | PASS | "No function/method/class matching..." | +| `deps src/builder/pipeline.js` | PASS | 17 imports, 9 importers | +| `deps nonexistent.js` | PASS | "No file matching..." 
| +| `impact src/builder/pipeline.js` | PASS | 9 level-1 impacted files | +| `map -n 5` | PASS | Top 5 connected nodes | +| `map --no-tests` | PASS | Same counts (no test filtering visible in totals) | +| `fn-impact buildGraph` | PASS | 14 level-1 impacted functions | +| `fn-impact buildGraph --json` | PASS | Valid JSON | +| `context buildGraph` | PASS | Source, deps, callers, tests | +| `context buildGraph --no-source` | PASS | Metadata only | +| `context buildGraph --include-tests` | PASS | Includes test source | +| `context buildGraph --json` | PASS | Valid JSON | +| `where buildGraph` | PASS | Definition + 50+ use sites | +| `where -f src/builder/pipeline.js` | PASS | File overview mode | +| `where buildGraph --json` | PASS | Valid JSON | +| `diff-impact` | PASS | "No changes detected" | +| `diff-impact --staged` | PASS | "No changes detected" | +| `cycles` | PASS | "No circular dependencies detected" | +| `cycles --functions` | PASS | 9/11 function-level cycles | +| `cycles --json` | PASS | Valid JSON | +| `structure --depth 1` | PASS | 5 top-level directories | +| `structure .` | PASS | Same (v2.2.0 regression fixed) | +| `export -f dot` | PASS | Valid DOT output | +| `export -f mermaid` | PASS | Valid Mermaid flowchart | +| `export -f json` | PASS | Valid JSON with nodes/edges | +| `export --functions -f dot` | PASS | Function-level DOT | +| `stats` | PASS | Full overview with nodes, edges, cycles, quality | +| `stats --json` | PASS | Valid JSON | +| `complexity` | PASS | 1492 functions, sorted by cognitive | +| `complexity buildGraph` | PASS | Shows cog=37, cyc=31 | +| `triage` | PASS | Risk-ranked audit queue | +| `roles` | PASS | 1971 classified (dead=974, core=417) → after full rebuild: 4871 (dead=3983, core=578) | +| `communities` | PASS | 72 communities, modularity 0.55 | +| `path buildGraph openDb` | PASS | 1-hop path | +| `sequence buildGraph` | PASS | Mermaid sequence diagram, 161 participants | +| `cfg buildGraph` | PASS | 0 blocks (expected — 
| Pipe output: `map --json \| head -1` | PASS — clean JSON |
Rebuild & Staleness + +| Test | Result | +|------|--------| +| Incremental no-op | PASS — "No changes detected. Graph is up to date." | +| Touch file (no content change) | PASS — mtime+size tier detects change, hash tier skips it: "Self-healed mtime/size for 1 files" | +| Content change → incremental rebuild | PASS — only changed file + reverse deps re-parsed | +| Full rebuild (`--no-incremental`) | PASS — 5323 nodes, 11466 edges | +| Node counts stable after no-op | PASS — identical | + +### Embed → Rebuild → Search Pipeline + +| Step | Result | +|------|--------| +| Embed with minilm (1508 symbols) | PASS | +| Search after embed | PASS — relevant results | +| Full rebuild after embed | Not re-tested (would clear embeddings) | + +--- + +## 5. Engine Comparison + +### Build Results + +| Metric | Native | WASM | Delta | +|--------|--------|------|-------| +| Nodes | 5323 | 5318 | +5 (0.1%) | +| Edges | 11466 | 11496 | -30 (0.3%) | +| Functions | 1129 | 1129 | 0 | +| Methods | 363 | 360 | +3 | +| Parameters | 2630 | 2664 | -34 | +| Constants | 354 | 318 | +36 | +| Call edges | 2348 | 2348 | 0 | +| Contains edges | 5316 | 5311 | +5 | +| Dynamic imports | 127 | 128 | -1 | +| Complexity fns | 1492 | 1488 | +4 | +| Function cycles | 11 | 11 | 0 | + +Parity is excellent. Minor differences in parameter/constant/method counts are expected due to extraction heuristic differences between Rust and JS parsers. Call edges are identical. 
**Complexity sanity check:** Native `complexityMs` (33.7ms) is much lower than the WASM equivalent, confirming the native binary is not stale for complexity computation.
| ESM `import * as cg from '@optave/codegraph'` | PASS — 56 exports including all data functions, errors, constants |
+ +### Registry Flow + +| Step | Result | +|------|--------| +| `registry add -n ` | PASS | +| `registry list` / `registry list --json` | PASS | +| `registry remove ` | PASS | +| `registry prune --ttl 0` | PASS — pruned 36 stale entries | + +### Config + +| Test | Result | +|------|--------| +| `.codegraphrc.json` loaded | PASS — verbose output confirms config loaded | +| Build respects config | PASS | + +--- + +## 8. Bugs Found + +### BUG 1: Native binary optionalDependencies pinned to 3.1.3 (Medium) + +- **Issue:** [#454](https://github.com/optave/codegraph/issues/454) +- **PR:** Fixed on this branch (package.json updated) +- **Symptoms:** `codegraph info` shows "binary reports 3.1.3 — stale". Published package.json has wrong pins. +- **Root cause:** Release workflow or `sync-native-versions.js` didn't update pins before publishing v3.1.4. +- **Fix applied:** Updated all `@optave/codegraph-*` pins from `3.1.3` to `3.1.4` in `optionalDependencies`. + +### BUG 2: CJS require() fails with ERR_PACKAGE_PATH_NOT_EXPORTED (Medium) + +- **Issue:** [#455](https://github.com/optave/codegraph/issues/455) +- **PR:** Open — needs decision on CJS support strategy +- **Symptoms:** `const cg = require('@optave/codegraph')` throws `ERR_PACKAGE_PATH_NOT_EXPORTED`. +- **Root cause:** Package `exports` field only defines `import` condition, no `require` or `default`. Package is `"type": "module"`. +- **Fix applied:** None yet — needs architectural decision (CJS wrapper, `default` export, or ESM-only documentation). + +--- + +## 9. Suggestions for Improvement + +### 9.1 Update dogfood skill command references +The dogfood skill template references `fn`, `explain`, and `hotspots` as standalone CLI commands. These no longer exist in v3.1.4 — they were absorbed into `query`, `audit`/`context`, and `triage --level file` during the vertical slice refactoring. Update the skill to match current CLI. 
+ +### 9.2 `exports` command for barrel files +`exports src/db.js` returns "No exported symbols found" for barrel/re-export files. While technically correct (no function nodes in the file), it's confusing since `db.js` is a heavily-imported file (86 fan-in). Consider showing re-exported symbols with a note that they originate from sub-modules. + +### 9.3 MCP tool count in skill template +The skill says to expect 23 tools (single-repo) / 24 (multi-repo). The actual count is now 31/32 after all the new commands were added. Update the expected counts. + +### 9.4 `info` "stale" warning is confusing +The "binary reports 3.1.3 — stale" warning appears even when the native engine works correctly at the right version. Consider suppressing the stale warning when the npm package version matches the expected version, or rewording it to clarify it's cosmetic. + +--- + +## 10. Testing Plan + +### General Testing Plan (Any Release) + +- [ ] Install from npm, verify version +- [ ] Native binary loads correctly for platform +- [ ] All commands fail gracefully without a graph DB +- [ ] Build completes on self (codegraph repo) +- [ ] All query commands produce correct output +- [ ] `--json` flag produces valid JSON on all supporting commands +- [ ] Incremental no-op: "Graph is up to date" +- [ ] Touch file: hash tier skips unchanged content +- [ ] Content change: only changed + reverse-deps re-parsed +- [ ] `--no-incremental` produces consistent results +- [ ] Engine comparison: native vs WASM parity within 5% +- [ ] Embed + search pipeline works end-to-end +- [ ] MCP server returns correct tool list (single-repo and multi-repo) +- [ ] Registry add/list/remove/prune workflow +- [ ] All tests pass (`npm test`) +- [ ] Lint passes (`npm run lint`) + +### Release-Specific Testing Plan (v3.1.4) + +- [ ] Qualified names populated in DB (migration v15) +- [ ] `children` command works with new hierarchy +- [ ] All decomposed modules work (analysis/, commands/, embeddings/, graph/, 
presentation/) +- [ ] Domain errors shown in CLI output (DB_ERROR, ENGINE_UNAVAILABLE, etc.) +- [ ] InMemoryRepository unit tests pass +- [ ] Build pipeline stages visible with `--verbose` +- [ ] Phase timers (setupMs, finalizeMs) present in build output +- [ ] better-sqlite3 12.8.0 compatibility verified + +### Proposed Additional Tests + +- [ ] Test `branch-compare` command (requires two refs) +- [ ] Test `watch` mode with file modification detection +- [ ] Test concurrent builds (two simultaneous `build` calls) +- [ ] Test `.codegraphrc.json` `include`/`exclude` patterns with measurable filtering +- [ ] Test `apiKeyCommand` credential resolution +- [ ] Test database migration upgrade path (v4 → v15 with existing old DB) +- [ ] Test `snapshot save` / `snapshot restore` round-trip +- [ ] Test embedding model dimension mismatch warning +- [ ] Test `co-change --analyze` followed by `co-change` queries + +--- + +## 11. Overall Assessment + +v3.1.4 is a solid release delivering a massive architectural refactoring (Phase 3 vertical slice) without functional regressions. All 398 files parse correctly, all CLI commands work, engine parity is excellent (0.1% node difference), and the test suite passes (1862 tests, 0 failures). The new qualified names, graph model, and decomposed modules all function correctly. + +Two bugs found: +1. **Native binary pins stale** (#454) — cosmetic/correctness issue, trivial fix +2. **CJS require() broken** (#455) — affects CJS consumers, needs design decision + +The vertical slice refactoring is impressively clean — despite moving thousands of lines across modules, the CLI surface and MCP tools all work identically to before. Build performance is good (1.35s full build, 13ms no-op, 550ms 1-file rebuild). Query latency is sub-millisecond. + +**Rating: 8/10** — Functionally excellent. Deducted for the stale native binary pins (should be caught by release automation) and the CJS export gap. 
The architectural improvements are substantial and well-executed. + +--- + +## 12. Issues & PRs Created + +| Type | Number | Title | Status | +|------|--------|-------|--------| +| Issue | [#454](https://github.com/optave/codegraph/issues/454) | bug: native binary optionalDependencies pinned to 3.1.3 | Fixed on this branch | +| Issue | [#455](https://github.com/optave/codegraph/issues/455) | bug: CJS require() fails with ERR_PACKAGE_PATH_NOT_EXPORTED | Open |