From 82326e0296d33f68f981cbfd4daee28392a303c1 Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Mon, 23 Mar 2026 19:48:56 -0600 Subject: [PATCH 01/26] feat(types): migrate ast-analysis/ to TypeScript (Phase 6a) Migrate all 18 ast-analysis/ files from JavaScript to TypeScript: - engine, metrics, shared, visitor, visitor-utils (core framework) - 9 language rule files (csharp, go, java, javascript, php, python, ruby, rust) + barrel - 4 visitor implementations (ast-store, cfg, complexity, dataflow) Add detailed type interfaces for CfgRulesConfig, DataflowRulesConfig, ComplexityRules, HalsteadRules, HalsteadDerivedMetrics, and LanguageRuleModule to src/types.ts. Update ROADMAP progress to 94/283 (~33%). Impact: 312 functions changed, 140 affected --- docs/roadmap/ROADMAP.md | 9 +- src/ast-analysis/{engine.js => engine.ts} | 164 ++++--- src/ast-analysis/{metrics.js => metrics.ts} | 18 +- .../rules/{csharp.js => csharp.ts} | 20 +- src/ast-analysis/rules/{go.js => go.ts} | 22 +- src/ast-analysis/rules/{index.js => index.ts} | 28 +- src/ast-analysis/rules/{java.js => java.ts} | 20 +- .../rules/{javascript.js => javascript.ts} | 22 +- src/ast-analysis/rules/{php.js => php.ts} | 20 +- .../rules/{python.js => python.ts} | 20 +- src/ast-analysis/rules/{ruby.js => ruby.ts} | 20 +- src/ast-analysis/rules/{rust.js => rust.ts} | 20 +- src/ast-analysis/{shared.js => shared.ts} | 87 ++-- .../{visitor-utils.js => visitor-utils.ts} | 61 ++- src/ast-analysis/{visitor.js => visitor.ts} | 60 ++- ...-store-visitor.js => ast-store-visitor.ts} | 86 ++-- .../{cfg-visitor.js => cfg-visitor.ts} | 403 +++++++++++------- ...exity-visitor.js => complexity-visitor.ts} | 137 +++--- ...ataflow-visitor.js => dataflow-visitor.ts} | 212 ++++++--- src/types.ts | 121 +++++- 20 files changed, 983 insertions(+), 567 deletions(-) rename src/ast-analysis/{engine.js => engine.ts} (73%) rename src/ast-analysis/{metrics.js => metrics.ts} (88%) rename 
src/ast-analysis/rules/{csharp.js => csharp.ts} (92%) rename src/ast-analysis/rules/{go.js => go.ts} (91%) rename src/ast-analysis/rules/{index.js => index.ts} (78%) rename src/ast-analysis/rules/{java.js => java.ts} (92%) rename src/ast-analysis/rules/{javascript.js => javascript.ts} (93%) rename src/ast-analysis/rules/{php.js => php.ts} (93%) rename src/ast-analysis/rules/{python.js => python.ts} (92%) rename src/ast-analysis/rules/{ruby.js => ruby.ts} (92%) rename src/ast-analysis/rules/{rust.js => rust.ts} (91%) rename src/ast-analysis/{shared.js => shared.ts} (74%) rename src/ast-analysis/{visitor-utils.js => visitor-utils.ts} (75%) rename src/ast-analysis/{visitor.js => visitor.ts} (71%) rename src/ast-analysis/visitors/{ast-store-visitor.js => ast-store-visitor.ts} (63%) rename src/ast-analysis/visitors/{cfg-visitor.js => cfg-visitor.ts} (70%) rename src/ast-analysis/visitors/{complexity-visitor.js => complexity-visitor.ts} (57%) rename src/ast-analysis/visitors/{dataflow-visitor.js => dataflow-visitor.ts} (65%) diff --git a/docs/roadmap/ROADMAP.md b/docs/roadmap/ROADMAP.md index 4614c75e..b20feff0 100644 --- a/docs/roadmap/ROADMAP.md +++ b/docs/roadmap/ROADMAP.md @@ -1080,7 +1080,7 @@ npm workspaces (`package.json` `workspaces`), `pnpm-workspace.yaml`, and `lerna. ## Phase 5 -- TypeScript Migration -> **Status:** In Progress — 76 of 283 source files migrated (~27%), 207 `.js` files remaining +> **Status:** In Progress — 94 of 283 source files migrated (~33%), 189 `.js` files remaining **Goal:** Migrate the codebase from plain JavaScript to TypeScript, leveraging the clean module boundaries established in Phase 3. Incremental module-by-module migration starting from leaf modules inward. @@ -1149,11 +1149,11 @@ Migrate modules that implement domain logic and Phase 3 interfaces. Some migrate ### 5.5 -- Orchestration & Public API Migration (In Progress) -Migrate top-level orchestration, features, and entry points. 
Some migrated via [#555](https://github.com/optave/codegraph/pull/555), 159 files remaining. +Migrate top-level orchestration, features, and entry points. Some migrated via [#555](https://github.com/optave/codegraph/pull/555), 141 files remaining. -**Migrated:** `domain/graph/builder.ts` + `context.ts` + `helpers.ts` + `pipeline.ts`, `domain/graph/watcher.ts`, `domain/search/{generator,index,models}.ts`, `mcp/{index,middleware,server,tool-registry}.ts`, `features/export.ts`, `index.ts` +**Migrated:** `domain/graph/builder.ts` + `context.ts` + `helpers.ts` + `pipeline.ts`, `domain/graph/watcher.ts`, `domain/search/{generator,index,models}.ts`, `mcp/{index,middleware,server,tool-registry}.ts`, `features/export.ts`, `index.ts`, `ast-analysis/` (all 18 files — engine, metrics, shared, visitor, visitor-utils, 9 language rules, 4 visitors) -**Remaining (159):** +**Remaining (141):** | Module | Files | Notes | |--------|-------|-------| @@ -1161,7 +1161,6 @@ Migrate top-level orchestration, features, and entry points. Some migrated via [ | `mcp/tools/` | 36 | Individual MCP tool handlers + barrel | | `presentation/` | 28 | Presentation formatters (14 files), `queries-cli/` (7 files), sequence-renderer, viewer, export, etc. 
| | `features/` | 21 | audit, batch, boundaries, cfg, check, cochange, communities, complexity, dataflow, flow, graph-enrichment, manifesto, owners, sequence, snapshot, structure, triage, ast, branch-compare, `shared/find-nodes` | -| `ast-analysis/` | 18 | AST analysis framework, visitors (4), language-specific rules (9), engine, metrics, shared, visitor-utils | | `index.js` | 1 | Public API exports (stale — `.ts` exists) | **Stale `.js` counterparts to delete (13 files):** `domain/graph/builder.js`, `domain/graph/builder/{context,helpers,pipeline}.js`, `domain/graph/watcher.js`, `domain/search/{generator,index,models}.js`, `features/export.js`, `mcp/{index,middleware,server,tool-registry}.js` — these have `.ts` counterparts already diff --git a/src/ast-analysis/engine.js b/src/ast-analysis/engine.ts similarity index 73% rename from src/ast-analysis/engine.js rename to src/ast-analysis/engine.ts index 5da9fe5e..050b0b5d 100644 --- a/src/ast-analysis/engine.js +++ b/src/ast-analysis/engine.ts @@ -19,6 +19,22 @@ import path from 'node:path'; import { performance } from 'node:perf_hooks'; import { bulkNodeIdsByFile } from '../db/index.js'; import { debug } from '../infrastructure/logger.js'; +import type { + AnalysisOpts, + AnalysisTiming, + ASTNodeRow, + BetterSqlite3Database, + CfgBlock, + CfgEdge, + DataflowResult, + Definition, + EngineOpts, + ExtractorOutput, + TreeSitterNode, + Visitor, + WalkOptions, + WalkResults, +} from '../types.js'; import { computeLOCMetrics, computeMaintainabilityIndex } from './metrics.js'; import { AST_TYPE_MAPS, @@ -35,6 +51,35 @@ import { createCfgVisitor } from './visitors/cfg-visitor.js'; import { createComplexityVisitor } from './visitors/complexity-visitor.js'; import { createDataflowVisitor } from './visitors/dataflow-visitor.js'; +// ─── Visitor result shapes (internal, not exported) ────────────────────── + +interface ComplexityFuncResult { + funcNode: TreeSitterNode; + funcName: string | null; + metrics: { + cognitive: 
number; + cyclomatic: number; + maxNesting: number; + halstead?: { volume: number; difficulty: number; effort: number; bugs: number }; + }; +} + +interface CfgFuncResult { + funcNode: TreeSitterNode; + blocks: CfgBlock[]; + edges: CfgEdge[]; + cyclomatic?: number; +} + +interface SetupResult { + visitors: Visitor[]; + walkerOpts: WalkOptions; + astVisitor: Visitor | null; + complexityVisitor: Visitor | null; + cfgVisitor: Visitor | null; + dataflowVisitor: Visitor | null; +} + // ─── Extension sets for quick language-support checks ──────────────────── const CFG_EXTENSIONS = buildExtensionSet(CFG_RULES); @@ -44,15 +89,19 @@ const WALK_EXTENSIONS = buildExtensionSet(AST_TYPE_MAPS); // ─── Lazy imports (heavy modules loaded only when needed) ──────────────── -let _parserModule = null; -async function getParserModule() { +let _parserModule: Awaited | null = null; +async function getParserModule(): Promise { if (!_parserModule) _parserModule = await import('../domain/parser.js'); return _parserModule; } // ─── WASM pre-parse ───────────────────────────────────────────────────── -async function ensureWasmTreesIfNeeded(fileSymbols, opts, rootDir) { +async function ensureWasmTreesIfNeeded( + fileSymbols: Map, + opts: AnalysisOpts, + rootDir: string, +): Promise { const doComplexity = opts.complexity !== false; const doCfg = opts.cfg !== false; const doDataflow = opts.dataflow !== false; @@ -91,15 +140,21 @@ async function ensureWasmTreesIfNeeded(fileSymbols, opts, rootDir) { try { const { ensureWasmTrees } = await getParserModule(); await ensureWasmTrees(fileSymbols, rootDir); - } catch (err) { - debug(`ensureWasmTrees failed: ${err.message}`); + } catch (err: unknown) { + debug(`ensureWasmTrees failed: ${(err as Error).message}`); } } } // ─── Per-file visitor setup ───────────────────────────────────────────── -function setupVisitors(db, relPath, symbols, langId, opts) { +function setupVisitors( + db: BetterSqlite3Database, + relPath: string, + symbols: ExtractorOutput, 
+ langId: string, + opts: AnalysisOpts, +): SetupResult { const ext = path.extname(relPath).toLowerCase(); const defs = symbols.definitions || []; const doAst = opts.ast !== false; @@ -107,18 +162,18 @@ function setupVisitors(db, relPath, symbols, langId, opts) { const doCfg = opts.cfg !== false; const doDataflow = opts.dataflow !== false; - const visitors = []; - const walkerOpts = { - functionNodeTypes: new Set(), - nestingNodeTypes: new Set(), - getFunctionName: (_node) => null, + const visitors: Visitor[] = []; + const walkerOpts: WalkOptions = { + functionNodeTypes: new Set(), + nestingNodeTypes: new Set(), + getFunctionName: (_node: TreeSitterNode) => null, }; // AST-store visitor - let astVisitor = null; + let astVisitor: Visitor | null = null; const astTypeMap = AST_TYPE_MAPS.get(langId); if (doAst && astTypeMap && WALK_EXTENSIONS.has(ext) && !symbols.astNodes?.length) { - const nodeIdMap = new Map(); + const nodeIdMap = new Map(); for (const row of bulkNodeIdsByFile(db, relPath)) { nodeIdMap.set(`${row.name}|${row.kind}|${row.line}`, row.id); } @@ -127,7 +182,7 @@ function setupVisitors(db, relPath, symbols, langId, opts) { } // Complexity visitor (file-level mode) - let complexityVisitor = null; + let complexityVisitor: Visitor | null = null; const cRules = COMPLEXITY_RULES.get(langId); const hRules = HALSTEAD_RULES.get(langId); if (doComplexity && cRules) { @@ -138,20 +193,21 @@ function setupVisitors(db, relPath, symbols, langId, opts) { complexityVisitor = createComplexityVisitor(cRules, hRules, { fileLevelWalk: true, langId }); visitors.push(complexityVisitor); - for (const t of cRules.nestingNodes) walkerOpts.nestingNodeTypes.add(t); + for (const t of cRules.nestingNodes) walkerOpts.nestingNodeTypes?.add(t); const dfRules = DATAFLOW_RULES.get(langId); - walkerOpts.getFunctionName = (node) => { + walkerOpts.getFunctionName = (node: TreeSitterNode): string | null => { const nameNode = node.childForFieldName('name'); if (nameNode) return nameNode.text; 
- if (dfRules) return getFuncName(node, dfRules); + // biome-ignore lint/suspicious/noExplicitAny: DataflowRulesConfig is structurally compatible at runtime + if (dfRules) return getFuncName(node, dfRules as any); return null; }; } } // CFG visitor - let cfgVisitor = null; + let cfgVisitor: Visitor | null = null; const cfgRulesForLang = CFG_RULES.get(langId); if (doCfg && cfgRulesForLang && CFG_EXTENSIONS.has(ext)) { const needsWasmCfg = defs.some( @@ -168,7 +224,7 @@ function setupVisitors(db, relPath, symbols, langId, opts) { } // Dataflow visitor - let dataflowVisitor = null; + let dataflowVisitor: Visitor | null = null; const dfRules = DATAFLOW_RULES.get(langId); if (doDataflow && dfRules && DATAFLOW_EXTENSIONS.has(ext) && !symbols.dataflow) { dataflowVisitor = createDataflowVisitor(dfRules); @@ -180,14 +236,15 @@ function setupVisitors(db, relPath, symbols, langId, opts) { // ─── Result storage helpers ───────────────────────────────────────────── -function storeComplexityResults(results, defs, langId) { - const complexityResults = results.complexity || []; - const resultByLine = new Map(); +function storeComplexityResults(results: WalkResults, defs: Definition[], langId: string): void { + // biome-ignore lint/complexity/useLiteralKeys: bracket notation required by noPropertyAccessFromIndexSignature + const complexityResults = (results['complexity'] || []) as ComplexityFuncResult[]; + const resultByLine = new Map(); for (const r of complexityResults) { if (r.funcNode) { const line = r.funcNode.startPosition.row + 1; if (!resultByLine.has(line)) resultByLine.set(line, []); - resultByLine.get(line).push(r); + resultByLine.get(line)?.push(r); } } for (const def of defs) { @@ -221,14 +278,15 @@ function storeComplexityResults(results, defs, langId) { } } -function storeCfgResults(results, defs) { - const cfgResults = results.cfg || []; - const cfgByLine = new Map(); +function storeCfgResults(results: WalkResults, defs: Definition[]): void { + // biome-ignore 
lint/complexity/useLiteralKeys: bracket notation required by noPropertyAccessFromIndexSignature + const cfgResults = (results['cfg'] || []) as CfgFuncResult[]; + const cfgByLine = new Map(); for (const r of cfgResults) { if (r.funcNode) { const line = r.funcNode.startPosition.row + 1; if (!cfgByLine.has(line)) cfgByLine.set(line, []); - cfgByLine.get(line).push(r); + cfgByLine.get(line)?.push(r); } } for (const def of defs) { @@ -254,7 +312,7 @@ function storeCfgResults(results, defs) { def.complexity.cyclomatic = cfgResult.cyclomatic; const { loc, halstead } = def.complexity; const volume = halstead ? halstead.volume : 0; - const commentRatio = loc?.loc > 0 ? loc.commentLines / loc.loc : 0; + const commentRatio = loc && loc.loc > 0 ? loc.commentLines / loc.loc : 0; def.complexity.maintainabilityIndex = computeMaintainabilityIndex( volume, cfgResult.cyclomatic, @@ -269,14 +327,21 @@ function storeCfgResults(results, defs) { // ─── Build delegation ─────────────────────────────────────────────────── -async function delegateToBuildFunctions(db, fileSymbols, rootDir, opts, engineOpts, timing) { +async function delegateToBuildFunctions( + db: BetterSqlite3Database, + fileSymbols: Map, + rootDir: string, + opts: AnalysisOpts, + engineOpts: EngineOpts | undefined, + timing: AnalysisTiming, +): Promise { if (opts.ast !== false) { const t0 = performance.now(); try { const { buildAstNodes } = await import('../features/ast.js'); await buildAstNodes(db, fileSymbols, rootDir, engineOpts); - } catch (err) { - debug(`buildAstNodes failed: ${err.message}`); + } catch (err: unknown) { + debug(`buildAstNodes failed: ${(err as Error).message}`); } timing.astMs = performance.now() - t0; } @@ -286,8 +351,8 @@ async function delegateToBuildFunctions(db, fileSymbols, rootDir, opts, engineOp try { const { buildComplexityMetrics } = await import('../features/complexity.js'); await buildComplexityMetrics(db, fileSymbols, rootDir, engineOpts); - } catch (err) { - 
debug(`buildComplexityMetrics failed: ${err.message}`); + } catch (err: unknown) { + debug(`buildComplexityMetrics failed: ${(err as Error).message}`); } timing.complexityMs = performance.now() - t0; } @@ -297,8 +362,8 @@ async function delegateToBuildFunctions(db, fileSymbols, rootDir, opts, engineOp try { const { buildCFGData } = await import('../features/cfg.js'); await buildCFGData(db, fileSymbols, rootDir, engineOpts); - } catch (err) { - debug(`buildCFGData failed: ${err.message}`); + } catch (err: unknown) { + debug(`buildCFGData failed: ${(err as Error).message}`); } timing.cfgMs = performance.now() - t0; } @@ -308,8 +373,8 @@ async function delegateToBuildFunctions(db, fileSymbols, rootDir, opts, engineOp try { const { buildDataflowEdges } = await import('../features/dataflow.js'); await buildDataflowEdges(db, fileSymbols, rootDir, engineOpts); - } catch (err) { - debug(`buildDataflowEdges failed: ${err.message}`); + } catch (err: unknown) { + debug(`buildDataflowEdges failed: ${(err as Error).message}`); } timing.dataflowMs = performance.now() - t0; } @@ -317,18 +382,14 @@ async function delegateToBuildFunctions(db, fileSymbols, rootDir, opts, engineOp // ─── Public API ────────────────────────────────────────────────────────── -/** - * Run all enabled AST analyses in a coordinated pass. 
- * - * @param {object} db - open better-sqlite3 database (read-write) - * @param {Map} fileSymbols - Map - * @param {string} rootDir - absolute project root path - * @param {object} opts - build options (ast, complexity, cfg, dataflow toggles) - * @param {object} [engineOpts] - engine options - * @returns {Promise<{ astMs: number, complexityMs: number, cfgMs: number, dataflowMs: number }>} - */ -export async function runAnalyses(db, fileSymbols, rootDir, opts, engineOpts) { - const timing = { astMs: 0, complexityMs: 0, cfgMs: 0, dataflowMs: 0 }; +export async function runAnalyses( + db: BetterSqlite3Database, + fileSymbols: Map, + rootDir: string, + opts: AnalysisOpts, + engineOpts?: EngineOpts, +): Promise { + const timing: AnalysisTiming = { astMs: 0, complexityMs: 0, cfgMs: 0, dataflowMs: 0 }; const doAst = opts.ast !== false; const doComplexity = opts.complexity !== false; @@ -361,13 +422,14 @@ export async function runAnalyses(db, fileSymbols, rootDir, opts, engineOpts) { const defs = symbols.definitions || []; if (astVisitor) { - const astRows = results['ast-store'] || []; + const astRows = (results['ast-store'] || []) as ASTNodeRow[]; if (astRows.length > 0) symbols.astNodes = astRows; } if (complexityVisitor) storeComplexityResults(results, defs, langId); if (cfgVisitor) storeCfgResults(results, defs); - if (dataflowVisitor) symbols.dataflow = results.dataflow; + // biome-ignore lint/complexity/useLiteralKeys: bracket notation required by noPropertyAccessFromIndexSignature + if (dataflowVisitor) symbols.dataflow = results['dataflow'] as DataflowResult; } timing._unifiedWalkMs = performance.now() - t0walk; diff --git a/src/ast-analysis/metrics.js b/src/ast-analysis/metrics.ts similarity index 88% rename from src/ast-analysis/metrics.js rename to src/ast-analysis/metrics.ts index 15bcbcb0..713c4a8e 100644 --- a/src/ast-analysis/metrics.js +++ b/src/ast-analysis/metrics.ts @@ -5,6 +5,8 @@ * all stateless math that can be reused by visitor-based and standalone 
paths. */ +import type { HalsteadDerivedMetrics, LOCMetrics, TreeSitterNode } from '../types.js'; + // ─── Halstead Derived Metrics ───────────────────────────────────────────── /** @@ -14,7 +16,10 @@ * @param {Map} operands - operand text → count * @returns {{ n1: number, n2: number, bigN1: number, bigN2: number, vocabulary: number, length: number, volume: number, difficulty: number, effort: number, bugs: number }} */ -export function computeHalsteadDerived(operators, operands) { +export function computeHalsteadDerived( + operators: Map, + operands: Map, +): HalsteadDerivedMetrics { const n1 = operators.size; const n2 = operands.size; let bigN1 = 0; @@ -47,7 +52,7 @@ export function computeHalsteadDerived(operators, operands) { const C_STYLE_PREFIXES = ['//', '/*', '*', '*/']; -const COMMENT_PREFIXES = new Map([ +const COMMENT_PREFIXES = new Map([ ['javascript', C_STYLE_PREFIXES], ['typescript', C_STYLE_PREFIXES], ['tsx', C_STYLE_PREFIXES], @@ -67,7 +72,7 @@ const COMMENT_PREFIXES = new Map([ * @param {string} [language] - Language ID (falls back to C-style prefixes) * @returns {{ loc: number, sloc: number, commentLines: number }} */ -export function computeLOCMetrics(functionNode, language) { +export function computeLOCMetrics(functionNode: TreeSitterNode, language?: string): LOCMetrics { const text = functionNode.text; const lines = text.split('\n'); const loc = lines.length; @@ -103,7 +108,12 @@ export function computeLOCMetrics(functionNode, language) { * @param {number} [commentRatio] - Comment ratio (0-1), optional * @returns {number} Normalized MI (0-100) */ -export function computeMaintainabilityIndex(volume, cyclomatic, sloc, commentRatio) { +export function computeMaintainabilityIndex( + volume: number, + cyclomatic: number, + sloc: number, + commentRatio?: number, +): number { const safeVolume = Math.max(volume, 1); const safeSLOC = Math.max(sloc, 1); diff --git a/src/ast-analysis/rules/csharp.js b/src/ast-analysis/rules/csharp.ts similarity index 92% 
rename from src/ast-analysis/rules/csharp.js rename to src/ast-analysis/rules/csharp.ts index 669ff7da..8c470907 100644 --- a/src/ast-analysis/rules/csharp.js +++ b/src/ast-analysis/rules/csharp.ts @@ -1,12 +1,14 @@ -/** - * C# — AST analysis rules. - */ - +import type { + CfgRulesConfig, + ComplexityRules, + DataflowRulesConfig, + HalsteadRules, +} from '../../types.js'; import { makeCfgRules, makeDataflowRules } from '../shared.js'; // ─── Complexity ─────────────────────────────────────────────────────────── -export const complexity = { +export const complexity: ComplexityRules = { branchNodes: new Set([ 'if_statement', 'else_clause', @@ -47,7 +49,7 @@ export const complexity = { // ─── Halstead ───────────────────────────────────────────────────────────── -export const halstead = { +export const halstead: HalsteadRules = { operatorLeafTypes: new Set([ '+', '-', @@ -139,7 +141,7 @@ export const halstead = { // ─── CFG ────────────────────────────────────────────────────────────────── -export const cfg = makeCfgRules({ +export const cfg: CfgRulesConfig = makeCfgRules({ ifNode: 'if_statement', elseViaAlternative: true, forNodes: new Set(['for_statement', 'foreach_statement']), @@ -166,7 +168,7 @@ export const cfg = makeCfgRules({ // ─── Dataflow ───────────────────────────────────────────────────────────── -export const dataflow = makeDataflowRules({ +export const dataflow: DataflowRulesConfig = makeDataflowRules({ functionNodes: new Set([ 'method_declaration', 'constructor_declaration', @@ -198,4 +200,4 @@ export const dataflow = makeDataflowRules({ // ─── AST Node Types ─────────────────────────────────────────────────────── -export const astTypes = null; +export const astTypes: Record | null = null; diff --git a/src/ast-analysis/rules/go.js b/src/ast-analysis/rules/go.ts similarity index 91% rename from src/ast-analysis/rules/go.js rename to src/ast-analysis/rules/go.ts index 7d5aacae..b2792084 100644 --- a/src/ast-analysis/rules/go.js +++ 
b/src/ast-analysis/rules/go.ts @@ -1,12 +1,14 @@ -/** - * Go — AST analysis rules. - */ - +import type { + CfgRulesConfig, + ComplexityRules, + DataflowRulesConfig, + HalsteadRules, +} from '../../types.js'; import { makeCfgRules, makeDataflowRules } from '../shared.js'; // ─── Complexity ─────────────────────────────────────────────────────────── -export const complexity = { +export const complexity: ComplexityRules = { branchNodes: new Set([ 'if_statement', 'for_statement', @@ -35,7 +37,7 @@ export const complexity = { // ─── Halstead ───────────────────────────────────────────────────────────── -export const halstead = { +export const halstead: HalsteadRules = { operatorLeafTypes: new Set([ '+', '-', @@ -122,7 +124,7 @@ export const halstead = { // ─── CFG ────────────────────────────────────────────────────────────────── -export const cfg = makeCfgRules({ +export const cfg: CfgRulesConfig = makeCfgRules({ ifNode: 'if_statement', elseViaAlternative: true, forNodes: new Set(['for_statement']), @@ -144,7 +146,7 @@ export const cfg = makeCfgRules({ // ─── Dataflow ───────────────────────────────────────────────────────────── -export const dataflow = makeDataflowRules({ +export const dataflow: DataflowRulesConfig = makeDataflowRules({ functionNodes: new Set(['function_declaration', 'method_declaration', 'func_literal']), returnNode: 'return_statement', varDeclaratorNodes: new Set(['short_var_declaration', 'var_declaration']), @@ -163,7 +165,7 @@ export const dataflow = makeDataflowRules({ expressionListType: 'expression_list', extractParamName(node) { if (node.type === 'parameter_declaration') { - const names = []; + const names: string[] = []; for (const c of node.namedChildren) { if (c.type === 'identifier') names.push(c.text); } @@ -179,4 +181,4 @@ export const dataflow = makeDataflowRules({ // ─── AST Node Types ─────────────────────────────────────────────────────── -export const astTypes = null; +export const astTypes: Record | null = null; diff --git 
a/src/ast-analysis/rules/index.js b/src/ast-analysis/rules/index.ts similarity index 78% rename from src/ast-analysis/rules/index.js rename to src/ast-analysis/rules/index.ts index d61901e8..dac941b9 100644 --- a/src/ast-analysis/rules/index.js +++ b/src/ast-analysis/rules/index.ts @@ -1,9 +1,9 @@ -/** - * Assembled rule maps for all AST analysis modules. - * - * Re-exports per-language rules as Maps keyed by language ID. - */ - +import type { + CfgRulesConfig, + ComplexityRules, + DataflowRulesConfig, + HalsteadRules, +} from '../../types.js'; import * as csharp from './csharp.js'; import * as go from './go.js'; import * as java from './java.js'; @@ -15,7 +15,7 @@ import * as rust from './rust.js'; // ─── Complexity Rules ───────────────────────────────────────────────────── -export const COMPLEXITY_RULES = new Map([ +export const COMPLEXITY_RULES: Map = new Map([ ['javascript', javascript.complexity], ['typescript', javascript.complexity], ['tsx', javascript.complexity], @@ -30,7 +30,7 @@ export const COMPLEXITY_RULES = new Map([ // ─── Halstead Rules ─────────────────────────────────────────────────────── -export const HALSTEAD_RULES = new Map([ +export const HALSTEAD_RULES: Map = new Map([ ['javascript', javascript.halstead], ['typescript', javascript.halstead], ['tsx', javascript.halstead], @@ -45,7 +45,7 @@ export const HALSTEAD_RULES = new Map([ // ─── CFG Rules ──────────────────────────────────────────────────────────── -export const CFG_RULES = new Map([ +export const CFG_RULES: Map = new Map([ ['javascript', javascript.cfg], ['typescript', javascript.cfg], ['tsx', javascript.cfg], @@ -60,7 +60,7 @@ export const CFG_RULES = new Map([ // ─── Dataflow Rules ────────────────────────────────────────────────────── -export const DATAFLOW_RULES = new Map([ +export const DATAFLOW_RULES: Map = new Map([ ['javascript', javascript.dataflow], ['typescript', javascript.dataflow], ['tsx', javascript.dataflow], @@ -75,8 +75,8 @@ export const DATAFLOW_RULES = new Map([ 
// ─── AST Type Maps ─────────────────────────────────────────────────────── -export const AST_TYPE_MAPS = new Map([ - ['javascript', javascript.astTypes], - ['typescript', javascript.astTypes], - ['tsx', javascript.astTypes], +export const AST_TYPE_MAPS: Map<string, Record<string, string>> = new Map([ + ['javascript', javascript.astTypes as Record<string, string>], + ['typescript', javascript.astTypes as Record<string, string>], + ['tsx', javascript.astTypes as Record<string, string>], ]); diff --git a/src/ast-analysis/rules/java.js b/src/ast-analysis/rules/java.ts similarity index 92% rename from src/ast-analysis/rules/java.js rename to src/ast-analysis/rules/java.ts index 16c73251..0b18d456 100644 --- a/src/ast-analysis/rules/java.js +++ b/src/ast-analysis/rules/java.ts @@ -1,12 +1,14 @@ -/** - * Java — AST analysis rules. - */ - +import type { + CfgRulesConfig, + ComplexityRules, + DataflowRulesConfig, + HalsteadRules, +} from '../../types.js'; import { makeCfgRules, makeDataflowRules } from '../shared.js'; // ─── Complexity ─────────────────────────────────────────────────────────── -export const complexity = { +export const complexity: ComplexityRules = { branchNodes: new Set([ 'if_statement', 'for_statement', @@ -40,7 +42,7 @@ export const complexity = { // ─── Halstead ───────────────────────────────────────────────────────────── -export const halstead = { +export const halstead: HalsteadRules = { operatorLeafTypes: new Set([ '+', '-', @@ -123,7 +125,7 @@ export const halstead = { // ─── CFG ────────────────────────────────────────────────────────────────── -export const cfg = makeCfgRules({ +export const cfg: CfgRulesConfig = makeCfgRules({ ifNode: 'if_statement', elseViaAlternative: true, forNodes: new Set(['for_statement', 'enhanced_for_statement']), @@ -146,7 +148,7 @@ export const cfg = makeCfgRules({ // ─── Dataflow ───────────────────────────────────────────────────────────── -export const dataflow = makeDataflowRules({ +export const dataflow: DataflowRulesConfig = makeDataflowRules({ functionNodes: new
Set(['method_declaration', 'constructor_declaration', 'lambda_expression']), returnNode: 'return_statement', varDeclaratorNode: 'variable_declarator', @@ -172,4 +174,4 @@ export const dataflow = makeDataflowRules({ // ─── AST Node Types ─────────────────────────────────────────────────────── -export const astTypes = null; +export const astTypes: Record | null = null; diff --git a/src/ast-analysis/rules/javascript.js b/src/ast-analysis/rules/javascript.ts similarity index 93% rename from src/ast-analysis/rules/javascript.js rename to src/ast-analysis/rules/javascript.ts index 99364121..8140abc4 100644 --- a/src/ast-analysis/rules/javascript.js +++ b/src/ast-analysis/rules/javascript.ts @@ -1,14 +1,14 @@ -/** - * JavaScript / TypeScript / TSX — AST analysis rules. - * - * Shared across: javascript, typescript, tsx language IDs. - */ - +import type { + CfgRulesConfig, + ComplexityRules, + DataflowRulesConfig, + HalsteadRules, +} from '../../types.js'; import { makeCfgRules, makeDataflowRules } from '../shared.js'; // ─── Complexity ─────────────────────────────────────────────────────────── -export const complexity = { +export const complexity: ComplexityRules = { branchNodes: new Set([ 'if_statement', 'else_clause', @@ -51,7 +51,7 @@ export const complexity = { // ─── Halstead ───────────────────────────────────────────────────────────── -export const halstead = { +export const halstead: HalsteadRules = { operatorLeafTypes: new Set([ // Arithmetic '+', @@ -160,7 +160,7 @@ export const halstead = { // ─── CFG ────────────────────────────────────────────────────────────────── -export const cfg = makeCfgRules({ +export const cfg: CfgRulesConfig = makeCfgRules({ ifNode: 'if_statement', elseClause: 'else_clause', forNodes: new Set(['for_statement', 'for_in_statement']), @@ -205,7 +205,7 @@ const JS_TS_MUTATING = new Set([ 'clear', ]); -export const dataflow = makeDataflowRules({ +export const dataflow: DataflowRulesConfig = makeDataflowRules({ functionNodes: new Set([ 
'function_declaration', 'method_definition', @@ -236,7 +236,7 @@ export const dataflow = makeDataflowRules({ // ─── AST Node Types ─────────────────────────────────────────────────────── -export const astTypes = { +export const astTypes: Record | null = { new_expression: 'new', throw_statement: 'throw', await_expression: 'await', diff --git a/src/ast-analysis/rules/php.js b/src/ast-analysis/rules/php.ts similarity index 93% rename from src/ast-analysis/rules/php.js rename to src/ast-analysis/rules/php.ts index b930008d..de689e08 100644 --- a/src/ast-analysis/rules/php.js +++ b/src/ast-analysis/rules/php.ts @@ -1,12 +1,14 @@ -/** - * PHP — AST analysis rules. - */ - +import type { + CfgRulesConfig, + ComplexityRules, + DataflowRulesConfig, + HalsteadRules, +} from '../../types.js'; import { makeCfgRules, makeDataflowRules } from '../shared.js'; // ─── Complexity ─────────────────────────────────────────────────────────── -export const complexity = { +export const complexity: ComplexityRules = { branchNodes: new Set([ 'if_statement', 'else_if_clause', @@ -48,7 +50,7 @@ export const complexity = { // ─── Halstead ───────────────────────────────────────────────────────────── -export const halstead = { +export const halstead: HalsteadRules = { operatorLeafTypes: new Set([ '+', '-', @@ -147,7 +149,7 @@ export const halstead = { // ─── CFG ────────────────────────────────────────────────────────────────── -export const cfg = makeCfgRules({ +export const cfg: CfgRulesConfig = makeCfgRules({ ifNode: 'if_statement', elifNode: 'else_if_clause', elseClause: 'else_clause', @@ -176,7 +178,7 @@ export const cfg = makeCfgRules({ // ─── Dataflow ───────────────────────────────────────────────────────────── -export const dataflow = makeDataflowRules({ +export const dataflow: DataflowRulesConfig = makeDataflowRules({ functionNodes: new Set([ 'function_definition', 'method_declaration', @@ -216,4 +218,4 @@ export const dataflow = makeDataflowRules({ // ─── AST Node Types 
─────────────────────────────────────────────────────── -export const astTypes = null; +export const astTypes: Record | null = null; diff --git a/src/ast-analysis/rules/python.js b/src/ast-analysis/rules/python.ts similarity index 92% rename from src/ast-analysis/rules/python.js rename to src/ast-analysis/rules/python.ts index 99319eb0..89d3f803 100644 --- a/src/ast-analysis/rules/python.js +++ b/src/ast-analysis/rules/python.ts @@ -1,12 +1,14 @@ -/** - * Python — AST analysis rules. - */ - +import type { + CfgRulesConfig, + ComplexityRules, + DataflowRulesConfig, + HalsteadRules, +} from '../../types.js'; import { makeCfgRules, makeDataflowRules } from '../shared.js'; // ─── Complexity ─────────────────────────────────────────────────────────── -export const complexity = { +export const complexity: ComplexityRules = { branchNodes: new Set([ 'if_statement', 'elif_clause', @@ -38,7 +40,7 @@ export const complexity = { // ─── Halstead ───────────────────────────────────────────────────────────── -export const halstead = { +export const halstead: HalsteadRules = { operatorLeafTypes: new Set([ '+', '-', @@ -119,7 +121,7 @@ export const halstead = { // ─── CFG ────────────────────────────────────────────────────────────────── -export const cfg = makeCfgRules({ +export const cfg: CfgRulesConfig = makeCfgRules({ ifNode: 'if_statement', elifNode: 'elif_clause', elseClause: 'else_clause', @@ -140,7 +142,7 @@ export const cfg = makeCfgRules({ // ─── Dataflow ───────────────────────────────────────────────────────────── -export const dataflow = makeDataflowRules({ +export const dataflow: DataflowRulesConfig = makeDataflowRules({ functionNodes: new Set(['function_definition', 'lambda']), defaultParamType: 'default_parameter', restParamType: 'list_splat_pattern', @@ -193,4 +195,4 @@ export const dataflow = makeDataflowRules({ // ─── AST Node Types ─────────────────────────────────────────────────────── -export const astTypes = null; +export const astTypes: Record | null = null; 
diff --git a/src/ast-analysis/rules/ruby.js b/src/ast-analysis/rules/ruby.ts similarity index 92% rename from src/ast-analysis/rules/ruby.js rename to src/ast-analysis/rules/ruby.ts index 722ef845..ea18c7ac 100644 --- a/src/ast-analysis/rules/ruby.js +++ b/src/ast-analysis/rules/ruby.ts @@ -1,12 +1,14 @@ -/** - * Ruby — AST analysis rules. - */ - +import type { + CfgRulesConfig, + ComplexityRules, + DataflowRulesConfig, + HalsteadRules, +} from '../../types.js'; import { makeCfgRules, makeDataflowRules } from '../shared.js'; // ─── Complexity ─────────────────────────────────────────────────────────── -export const complexity = { +export const complexity: ComplexityRules = { branchNodes: new Set([ 'if', 'elsif', @@ -34,7 +36,7 @@ export const complexity = { // ─── Halstead ───────────────────────────────────────────────────────────── -export const halstead = { +export const halstead: HalsteadRules = { operatorLeafTypes: new Set([ '+', '-', @@ -128,7 +130,7 @@ export const halstead = { // ─── CFG ────────────────────────────────────────────────────────────────── -export const cfg = makeCfgRules({ +export const cfg: CfgRulesConfig = makeCfgRules({ ifNode: 'if', elifNode: 'elsif', elseClause: 'else', @@ -151,7 +153,7 @@ export const cfg = makeCfgRules({ // ─── Dataflow ───────────────────────────────────────────────────────────── -export const dataflow = makeDataflowRules({ +export const dataflow: DataflowRulesConfig = makeDataflowRules({ functionNodes: new Set(['method', 'singleton_method', 'lambda']), paramListField: 'parameters', returnNode: 'return', @@ -201,4 +203,4 @@ export const dataflow = makeDataflowRules({ // ─── AST Node Types ─────────────────────────────────────────────────────── -export const astTypes = null; +export const astTypes: Record | null = null; diff --git a/src/ast-analysis/rules/rust.js b/src/ast-analysis/rules/rust.ts similarity index 91% rename from src/ast-analysis/rules/rust.js rename to src/ast-analysis/rules/rust.ts index 
4cc91326..4af834db 100644 --- a/src/ast-analysis/rules/rust.js +++ b/src/ast-analysis/rules/rust.ts @@ -1,12 +1,14 @@ -/** - * Rust — AST analysis rules. - */ - +import type { + CfgRulesConfig, + ComplexityRules, + DataflowRulesConfig, + HalsteadRules, +} from '../../types.js'; import { makeCfgRules, makeDataflowRules } from '../shared.js'; // ─── Complexity ─────────────────────────────────────────────────────────── -export const complexity = { +export const complexity: ComplexityRules = { branchNodes: new Set([ 'if_expression', 'else_clause', @@ -40,7 +42,7 @@ export const complexity = { // ─── Halstead ───────────────────────────────────────────────────────────── -export const halstead = { +export const halstead: HalsteadRules = { operatorLeafTypes: new Set([ '+', '-', @@ -123,7 +125,7 @@ export const halstead = { // ─── CFG ────────────────────────────────────────────────────────────────── -export const cfg = makeCfgRules({ +export const cfg: CfgRulesConfig = makeCfgRules({ ifNode: 'if_expression', ifNodes: new Set(['if_let_expression']), elseClause: 'else_clause', @@ -142,7 +144,7 @@ export const cfg = makeCfgRules({ // ─── Dataflow ───────────────────────────────────────────────────────────── -export const dataflow = makeDataflowRules({ +export const dataflow: DataflowRulesConfig = makeDataflowRules({ functionNodes: new Set(['function_item', 'closure_expression']), returnNode: 'return_expression', varDeclaratorNode: 'let_declaration', @@ -170,4 +172,4 @@ export const dataflow = makeDataflowRules({ // ─── AST Node Types ─────────────────────────────────────────────────────── -export const astTypes = null; +export const astTypes: Record | null = null; diff --git a/src/ast-analysis/shared.js b/src/ast-analysis/shared.ts similarity index 74% rename from src/ast-analysis/shared.js rename to src/ast-analysis/shared.ts index e3f40bd0..18b197db 100644 --- a/src/ast-analysis/shared.js +++ b/src/ast-analysis/shared.ts @@ -1,21 +1,19 @@ -/** - * Shared utilities for AST 
analysis modules (complexity, CFG, dataflow, AST nodes). - */ - import { LANGUAGE_REGISTRY } from '../domain/parser.js'; import { ConfigError } from '../shared/errors.js'; +import type { + CfgRulesConfig, + DataflowRulesConfig, + LanguageRegistryEntry, + TreeSitterNode, +} from '../types.js'; // ─── Generic Rule Factory ───────────────────────────────────────────────── -/** - * Merge defaults with overrides, validating that all keys are known. - * - * @param {object} defaults - Default rule values (defines the valid key set) - * @param {object} overrides - Language-specific overrides - * @param {string} label - Label for error messages (e.g. "CFG", "Dataflow") - * @returns {object} Merged rules - */ -export function makeRules(defaults, overrides, label) { +export function makeRules( + defaults: Record, + overrides: Record, + label: string, +): Record { const validKeys = new Set(Object.keys(defaults)); for (const key of Object.keys(overrides)) { if (!validKeys.has(key)) { @@ -27,7 +25,7 @@ export function makeRules(defaults, overrides, label) { // ─── CFG Defaults + Factory ─────────────────────────────────────────────── -export const CFG_DEFAULTS = { +export const CFG_DEFAULTS: CfgRulesConfig = { ifNode: null, ifNodes: null, elifNode: null, @@ -59,8 +57,12 @@ export const CFG_DEFAULTS = { functionNodes: new Set(), }; -export function makeCfgRules(overrides) { - const rules = makeRules(CFG_DEFAULTS, overrides, 'CFG'); +export function makeCfgRules(overrides: Partial): CfgRulesConfig { + const rules = makeRules( + CFG_DEFAULTS as unknown as Record, + overrides as unknown as Record, + 'CFG', + ) as unknown as CfgRulesConfig; if (!(rules.functionNodes instanceof Set) || rules.functionNodes.size === 0) { throw new ConfigError('CFG rules: functionNodes must be a non-empty Set'); } @@ -72,7 +74,7 @@ export function makeCfgRules(overrides) { // ─── Dataflow Defaults + Factory ────────────────────────────────────────── -export const DATAFLOW_DEFAULTS = { +export const 
DATAFLOW_DEFAULTS: DataflowRulesConfig = { // Scope entry functionNodes: new Set(), // REQUIRED: non-empty @@ -134,8 +136,12 @@ export const DATAFLOW_DEFAULTS = { extraIdentifierTypes: null, // Set of additional identifier-like types (PHP: variable_name, name) }; -export function makeDataflowRules(overrides) { - const rules = makeRules(DATAFLOW_DEFAULTS, overrides, 'Dataflow'); +export function makeDataflowRules(overrides: Partial): DataflowRulesConfig { + const rules = makeRules( + DATAFLOW_DEFAULTS as unknown as Record, + overrides as unknown as Record, + 'Dataflow', + ) as unknown as DataflowRulesConfig; if (!(rules.functionNodes instanceof Set) || rules.functionNodes.size === 0) { throw new ConfigError('Dataflow rules: functionNodes must be a non-empty Set'); } @@ -144,16 +150,18 @@ export function makeDataflowRules(overrides) { // ─── AST Helpers ────────────────────────────────────────────────────────── -/** - * Find the function body node in a parse tree that matches a given line range. 
- */ -export function findFunctionNode(rootNode, startLine, _endLine, rules) { +export function findFunctionNode( + rootNode: TreeSitterNode, + startLine: number, + _endLine: number, + rules: { functionNodes: Set<string> }, +): TreeSitterNode | null { // tree-sitter lines are 0-indexed const targetStart = startLine - 1; - let best = null; + let best: TreeSitterNode | null = null; - function search(node) { + function search(node: TreeSitterNode): void { const nodeStart = node.startPosition.row; const nodeEnd = node.endPosition.row; @@ -168,7 +176,8 @@ export function findFunctionNode(rootNode, startLine, _endLine, rules) { } for (let i = 0; i < node.childCount; i++) { - search(node.child(i)); + // biome-ignore lint/style/noNonNullAssertion: tree-sitter child(i) within childCount is always non-null + search(node.child(i)!); } } @@ -178,14 +187,10 @@ export function findFunctionNode(rootNode, startLine, _endLine, rules) { // ─── Extension / Language Mapping ───────────────────────────────────────── -/** - * Build a Map from file extension → language ID using the parser registry. - * - * @param {Iterable} [registry=LANGUAGE_REGISTRY] - Language registry entries - * @returns {Map} - */ -export function buildExtToLangMap(registry = LANGUAGE_REGISTRY) { - const map = new Map(); +export function buildExtToLangMap( + registry: LanguageRegistryEntry[] = LANGUAGE_REGISTRY as unknown as LanguageRegistryEntry[], +): Map<string, string> { + const map = new Map<string, string>(); for (const entry of registry) { for (const ext of entry.extensions) { map.set(ext, entry.id); @@ -194,15 +199,11 @@ export function buildExtToLangMap(registry = LANGUAGE_REGISTRY) { return map; } -/** - * Build a Set of file extensions for languages that have entries in the given rules Map. - * - * @param {Map} rulesMap - e.g.
COMPLEXITY_RULES, CFG_RULES - * @param {Iterable} [registry=LANGUAGE_REGISTRY] - Language registry entries - * @returns {Set} - */ -export function buildExtensionSet(rulesMap, registry = LANGUAGE_REGISTRY) { - const extensions = new Set(); +export function buildExtensionSet( + rulesMap: Map<string, unknown>, + registry: LanguageRegistryEntry[] = LANGUAGE_REGISTRY as unknown as LanguageRegistryEntry[], +): Set<string> { + const extensions = new Set<string>(); for (const entry of registry) { if (rulesMap.has(entry.id)) { for (const ext of entry.extensions) extensions.add(ext); diff --git a/src/ast-analysis/visitor-utils.js b/src/ast-analysis/visitor-utils.ts similarity index 75% rename from src/ast-analysis/visitor-utils.js rename to src/ast-analysis/visitor-utils.ts index b66e1b5a..ec745cca 100644 --- a/src/ast-analysis/visitor-utils.js +++ b/src/ast-analysis/visitor-utils.ts @@ -4,10 +4,40 @@ * Extracted from dataflow.js to be reusable across the visitor framework. */ +import type { TreeSitterNode } from '../types.js'; + +interface ParamInfo { + name: string; + index: number; +} + +interface LanguageRules { + nameField: string; + varAssignedFnParent?: string; + pairFnParent?: string; + assignmentFnParent?: string; + assignLeftField?: string; + paramIdentifier: string; + paramWrapperTypes: Set<string>; + defaultParamType?: string; + restParamType?: string; + objectDestructType?: string; + shorthandPropPattern?: string; + pairPatternType?: string; + arrayDestructType?: string; + extraIdentifierTypes?: Set<string>; + callFunctionField: string; + memberNode: string; + memberPropertyField: string; + memberObjectField: string; + optionalChainNode?: string; + extractParamName?(node: TreeSitterNode): string[] | null; +} + /** * Truncate a string to a maximum length. */ -export function truncate(str, max = 120) { +export function truncate(str: string, max = 120): string { if (!str) return ''; return str.length > max ?
`${str.slice(0, max)}…` : str; } @@ -15,7 +45,7 @@ export function truncate(str, max = 120) { /** * Get the name of a function node from the AST using rules. */ -export function functionName(fnNode, rules) { +export function functionName(fnNode: TreeSitterNode | null, rules: LanguageRules): string | null { if (!fnNode) return null; const nameNode = fnNode.childForFieldName(rules.nameField); if (nameNode) return nameNode.text; @@ -31,7 +61,7 @@ export function functionName(fnNode, rules) { return keyNode ? keyNode.text : null; } if (rules.assignmentFnParent && parent.type === rules.assignmentFnParent) { - const left = parent.childForFieldName(rules.assignLeftField); + const left = parent.childForFieldName(rules.assignLeftField!); return left ? left.text : null; } } @@ -41,9 +71,12 @@ export function functionName(fnNode, rules) { /** * Extract parameter names and indices from a formal_parameters node. */ -export function extractParams(paramsNode, rules) { +export function extractParams( + paramsNode: TreeSitterNode | null, + rules: LanguageRules, +): ParamInfo[] { if (!paramsNode) return []; - const result = []; + const result: ParamInfo[] = []; let index = 0; for (const child of paramsNode.namedChildren) { const names = extractParamNames(child, rules); @@ -58,7 +91,7 @@ export function extractParams(paramsNode, rules) { /** * Extract parameter names from a single parameter node. 
*/ -export function extractParamNames(node, rules) { +export function extractParamNames(node: TreeSitterNode | null, rules: LanguageRules): string[] { if (!node) return []; const t = node.type; @@ -89,7 +122,7 @@ export function extractParamNames(node, rules) { } if (rules.objectDestructType && t === rules.objectDestructType) { - const names = []; + const names: string[] = []; for (const child of node.namedChildren) { if (rules.shorthandPropPattern && child.type === rules.shorthandPropPattern) { names.push(child.text); @@ -104,7 +137,7 @@ export function extractParamNames(node, rules) { } if (rules.arrayDestructType && t === rules.arrayDestructType) { - const names = []; + const names: string[] = []; for (const child of node.namedChildren) { names.push(...extractParamNames(child, rules)); } @@ -117,7 +150,7 @@ export function extractParamNames(node, rules) { /** * Check if a node type is identifier-like for this language. */ -export function isIdent(nodeType, rules) { +export function isIdent(nodeType: string, rules: LanguageRules): boolean { if (nodeType === 'identifier' || nodeType === rules.paramIdentifier) return true; return rules.extraIdentifierTypes ? rules.extraIdentifierTypes.has(nodeType) : false; } @@ -125,7 +158,7 @@ export function isIdent(nodeType, rules) { /** * Resolve the name a call expression is calling using rules. */ -export function resolveCalleeName(callNode, rules) { +export function resolveCalleeName(callNode: TreeSitterNode, rules: LanguageRules): string | null { const fn = callNode.childForFieldName(rules.callFunctionField); if (!fn) { const nameNode = callNode.childForFieldName('name') || callNode.childForFieldName('method'); @@ -153,7 +186,7 @@ export function resolveCalleeName(callNode, rules) { /** * Get the receiver (object) of a member expression using rules. 
*/ -export function memberReceiver(memberExpr, rules) { +export function memberReceiver(memberExpr: TreeSitterNode, rules: LanguageRules): string | null { const obj = memberExpr.childForFieldName(rules.memberObjectField); if (!obj) return null; if (isIdent(obj.type, rules)) return obj.text; @@ -164,7 +197,11 @@ export function memberReceiver(memberExpr, rules) { /** * Collect all identifier names referenced within a node. */ -export function collectIdentifiers(node, out, rules) { +export function collectIdentifiers( + node: TreeSitterNode | null, + out: string[], + rules: LanguageRules, +): void { if (!node) return; if (isIdent(node.type, rules)) { out.push(node.text); diff --git a/src/ast-analysis/visitor.js b/src/ast-analysis/visitor.ts similarity index 71% rename from src/ast-analysis/visitor.js rename to src/ast-analysis/visitor.ts index b7e2f538..2bbea832 100644 --- a/src/ast-analysis/visitor.js +++ b/src/ast-analysis/visitor.ts @@ -9,24 +9,17 @@ * * The walker maintains shared context (nestingLevel, scopeStack, currentFunction) * so individual visitors don't need to track traversal state themselves. 
- * - * @typedef {object} VisitorContext - * @property {number} nestingLevel - Current nesting depth (for complexity) - * @property {object} currentFunction - Enclosing function node (or null) - * @property {string} langId - Language ID - * @property {Array} scopeStack - Function scope stack [{funcName, funcNode, params, locals}] - * - * @typedef {object} Visitor - * @property {string} name - * @property {function} [init](langId, rules) - Called once before the walk - * @property {function} [enterNode](node, context) - Called entering each node; return { skipChildren: true } to skip this visitor's hooks for descendants - * @property {function} [exitNode](node, context) - Called leaving each node - * @property {function} [enterFunction](funcNode, funcName, context) - Called entering a function - * @property {function} [exitFunction](funcNode, funcName, context) - Called leaving a function - * @property {function} [finish]() - Called after the walk; return collected data - * @property {Set} [functionNodeTypes] - Extra function node types this visitor cares about */ +import type { + ScopeEntry, + TreeSitterNode, + Visitor, + VisitorContext, + WalkOptions, + WalkResults, +} from '../types.js'; + /** * Walk an AST root with multiple visitors in a single DFS pass. 
* @@ -39,10 +32,15 @@ * @param {function} [options.getFunctionName] - (funcNode) => string|null * @returns {object} Map of visitor.name → finish() result */ -export function walkWithVisitors(rootNode, visitors, langId, options = {}) { +export function walkWithVisitors( + rootNode: TreeSitterNode, + visitors: Visitor[], + langId: string, + options: WalkOptions = {}, +): WalkResults { const { - functionNodeTypes = new Set(), - nestingNodeTypes = new Set(), + functionNodeTypes = new Set(), + nestingNodeTypes = new Set(), getFunctionName = () => null, } = options; @@ -60,8 +58,8 @@ export function walkWithVisitors(rootNode, visitors, langId, options = {}) { } // Shared context object (mutated during walk) - const scopeStack = []; - const context = { + const scopeStack: ScopeEntry[] = []; + const context: VisitorContext = { nestingLevel: 0, currentFunction: null, langId, @@ -70,14 +68,14 @@ export function walkWithVisitors(rootNode, visitors, langId, options = {}) { // Track which visitors have requested skipChildren at each depth // Key: visitor index, Value: depth at which skip was requested - const skipDepths = new Map(); + const skipDepths = new Map(); - function walk(node, depth) { + function walk(node: TreeSitterNode | null, depth: number): void { if (!node) return; const type = node.type; const isFunction = allFuncTypes.has(type); - let funcName = null; + let funcName: string | null = null; // Function boundary: enter if (isFunction) { @@ -85,7 +83,7 @@ export function walkWithVisitors(rootNode, visitors, langId, options = {}) { context.currentFunction = node; scopeStack.push({ funcName, funcNode: node, params: new Map(), locals: new Map() }); for (let i = 0; i < visitors.length; i++) { - const v = visitors[i]; + const v = visitors[i]!; if (v.enterFunction && !isSkipped(i, depth)) { v.enterFunction(node, funcName, context); } @@ -94,7 +92,7 @@ export function walkWithVisitors(rootNode, visitors, langId, options = {}) { // enterNode hooks for (let i = 0; i < 
visitors.length; i++) { - const v = visitors[i]; + const v = visitors[i]!; if (v.enterNode && !isSkipped(i, depth)) { const result = v.enterNode(node, context); if (result?.skipChildren) { @@ -117,7 +115,7 @@ export function walkWithVisitors(rootNode, visitors, langId, options = {}) { // exitNode hooks for (let i = 0; i < visitors.length; i++) { - const v = visitors[i]; + const v = visitors[i]!; if (v.exitNode && !isSkipped(i, depth)) { v.exitNode(node, context); } @@ -133,18 +131,18 @@ export function walkWithVisitors(rootNode, visitors, langId, options = {}) { // Function boundary: exit if (isFunction) { for (let i = 0; i < visitors.length; i++) { - const v = visitors[i]; + const v = visitors[i]!; if (v.exitFunction && !isSkipped(i, depth)) { v.exitFunction(node, funcName, context); } } scopeStack.pop(); context.currentFunction = - scopeStack.length > 0 ? scopeStack[scopeStack.length - 1].funcNode : null; + scopeStack.length > 0 ? scopeStack[scopeStack.length - 1]!.funcNode : null; } } - function isSkipped(visitorIndex, currentDepth) { + function isSkipped(visitorIndex: number, currentDepth: number): boolean { const skipAt = skipDepths.get(visitorIndex); // Skipped if skip was requested at a shallower (or equal) depth // We skip descendants, not the node itself, so skip when currentDepth > skipAt @@ -154,7 +152,7 @@ export function walkWithVisitors(rootNode, visitors, langId, options = {}) { walk(rootNode, 0); // Collect results - const results = {}; + const results: WalkResults = {}; for (const v of visitors) { results[v.name] = v.finish ? 
v.finish() : undefined; } diff --git a/src/ast-analysis/visitors/ast-store-visitor.js b/src/ast-analysis/visitors/ast-store-visitor.ts similarity index 63% rename from src/ast-analysis/visitors/ast-store-visitor.js rename to src/ast-analysis/visitors/ast-store-visitor.ts index 18fe6cc1..9d51d775 100644 --- a/src/ast-analysis/visitors/ast-store-visitor.js +++ b/src/ast-analysis/visitors/ast-store-visitor.ts @@ -1,30 +1,42 @@ -/** - * Visitor: Extract new/throw/await/string/regex AST nodes during a shared walk. - * - * Replaces the standalone walkAst() DFS in ast.js with a visitor that plugs - * into the unified walkWithVisitors framework. - */ - -/** Max length for the `text` column. */ +import type { + Definition, + EnterNodeResult, + TreeSitterNode, + Visitor, + VisitorContext, +} from '../../types.js'; + const TEXT_MAX = 200; -function truncate(s, max = TEXT_MAX) { +interface AstStoreRow { + file: string; + line: number; + kind: string; + name: string | null | undefined; + text: string | null; + receiver: null; + parentNodeId: number | null; +} + +function truncate(s: string | null | undefined, max: number = TEXT_MAX): string | null { if (!s) return null; return s.length <= max ? 
s : `${s.slice(0, max - 1)}\u2026`; } -function extractNewName(node) { +function extractNewName(node: TreeSitterNode): string { for (let i = 0; i < node.childCount; i++) { const child = node.child(i); + if (!child) continue; if (child.type === 'identifier') return child.text; if (child.type === 'member_expression') return child.text; } return node.text?.split('(')[0]?.replace('new ', '').trim() || '?'; } -function extractExpressionText(node) { +function extractExpressionText(node: TreeSitterNode): string | null { for (let i = 0; i < node.childCount; i++) { const child = node.child(i); + if (!child) continue; if (child.type !== 'throw' && child.type !== 'await') { return truncate(child.text); } @@ -32,10 +44,11 @@ function extractExpressionText(node) { return truncate(node.text); } -function extractName(kind, node) { +function extractName(kind: string, node: TreeSitterNode): string | null { if (kind === 'throw') { for (let i = 0; i < node.childCount; i++) { const child = node.child(i); + if (!child) continue; if (child.type === 'new_expression') return extractNewName(child); if (child.type === 'call_expression') { const fn = child.childForFieldName('function'); @@ -48,6 +61,7 @@ function extractName(kind, node) { if (kind === 'await') { for (let i = 0; i < node.childCount; i++) { const child = node.child(i); + if (!child) continue; if (child.type === 'call_expression') { const fn = child.childForFieldName('function'); return fn ? fn.text : child.text?.split('(')[0] || '?'; @@ -61,25 +75,20 @@ function extractName(kind, node) { return truncate(node.text); } -/** - * Create an AST-store visitor for use with walkWithVisitors. - * - * @param {object} astTypeMap - node type → kind mapping (e.g. 
JS_TS_AST_TYPES) - * @param {object[]} defs - symbol definitions for parent lookup - * @param {string} relPath - relative file path - * @param {Map} nodeIdMap - def key → node ID mapping - * @returns {Visitor} - */ -export function createAstStoreVisitor(astTypeMap, defs, relPath, nodeIdMap) { - const rows = []; - // Track which nodes we've already matched to avoid duplicates in recursive walk - const matched = new Set(); - - function findParentDef(line) { - let best = null; +export function createAstStoreVisitor( + astTypeMap: Record, + defs: Definition[], + relPath: string, + nodeIdMap: Map, +): Visitor { + const rows: AstStoreRow[] = []; + const matched = new Set(); + + function findParentDef(line: number): Definition | null { + let best: Definition | null = null; for (const def of defs) { if (def.line <= line && (def.endLine == null || def.endLine >= line)) { - if (!best || def.endLine - def.line < best.endLine - best.line) { + if (!best || (def.endLine ?? 0) - def.line < (best.endLine ?? 
0) - best.line) { best = def; } } @@ -87,7 +96,7 @@ export function createAstStoreVisitor(astTypeMap, defs, relPath, nodeIdMap) { return best; } - function resolveParentNodeId(line) { + function resolveParentNodeId(line: number): number | null { const parentDef = findParentDef(line); if (!parentDef) return null; return nodeIdMap.get(`${parentDef.name}|${parentDef.kind}|${parentDef.line}`) || null; @@ -96,15 +105,16 @@ export function createAstStoreVisitor(astTypeMap, defs, relPath, nodeIdMap) { return { name: 'ast-store', - enterNode(node, _context) { - if (matched.has(node.id)) return; + enterNode(node: TreeSitterNode, _context: VisitorContext): EnterNodeResult | undefined { + // biome-ignore lint/suspicious/noExplicitAny: tree-sitter node.id exists at runtime but not in our interface + if (matched.has((node as any).id)) return; const kind = astTypeMap[node.type]; if (!kind) return; const line = node.startPosition.row + 1; - let name; - let text = null; + let name: string | null | undefined; + let text: string | null = null; if (kind === 'new') { name = extractNewName(node); @@ -117,7 +127,7 @@ export function createAstStoreVisitor(astTypeMap, defs, relPath, nodeIdMap) { text = extractExpressionText(node); } else if (kind === 'string') { const content = node.text?.replace(/^['"`]|['"`]$/g, '') || ''; - if (content.length < 2) return; // skip trivial strings, walker still descends + if (content.length < 2) return; name = truncate(content, 100); text = truncate(node.text); } else if (kind === 'regex') { @@ -135,15 +145,15 @@ export function createAstStoreVisitor(astTypeMap, defs, relPath, nodeIdMap) { parentNodeId: resolveParentNodeId(line), }); - matched.add(node.id); + // biome-ignore lint/suspicious/noExplicitAny: tree-sitter node.id exists at runtime but not in our interface + matched.add((node as any).id); - // Don't recurse into children for new/throw/await (same as original walkAst) if (kind !== 'string' && kind !== 'regex') { return { skipChildren: true }; } 
}, - finish() { + finish(): AstStoreRow[] { return rows; }, }; diff --git a/src/ast-analysis/visitors/cfg-visitor.js b/src/ast-analysis/visitors/cfg-visitor.ts similarity index 70% rename from src/ast-analysis/visitors/cfg-visitor.js rename to src/ast-analysis/visitors/cfg-visitor.ts index 97bb344f..6d27b8e0 100644 --- a/src/ast-analysis/visitors/cfg-visitor.js +++ b/src/ast-analysis/visitors/cfg-visitor.ts @@ -1,45 +1,89 @@ -/** - * Visitor: Build intraprocedural Control Flow Graphs (CFGs) from tree-sitter AST. - * - * Replaces the statement-level traversal in cfg.js (buildFunctionCFG) with a - * node-level visitor that plugs into the unified walkWithVisitors framework. - * This eliminates the last redundant tree traversal (Mode B) in engine.js, - * unifying all 4 analyses into a single DFS walk. - * - * The visitor builds basic blocks and edges incrementally via enterNode/exitNode - * hooks, using a control-flow frame stack to track branch/loop/switch context. - */ - -// ── Node-type predicates ──────────────────────────────────────────────── - -function isIfNode(type, cfgRules) { +import type { TreeSitterNode, Visitor, VisitorContext } from '../../types.js'; + +// biome-ignore lint/suspicious/noExplicitAny: CFG rules are opaque language-specific objects +type AnyRules = any; + +function nn(node: TreeSitterNode | null): TreeSitterNode { + return node as TreeSitterNode; +} + +interface CfgBlockInternal { + index: number; + type: string; + startLine: number | null; + endLine: number | null; + label: string | null; +} + +interface CfgEdgeInternal { + sourceIndex: number; + targetIndex: number; + kind: string; +} + +interface LabelCtx { + headerBlock: CfgBlockInternal | null; + exitBlock: CfgBlockInternal | null; +} + +interface LoopCtx { + headerBlock: CfgBlockInternal; + exitBlock: CfgBlockInternal; +} + +interface FuncState { + blocks: CfgBlockInternal[]; + edges: CfgEdgeInternal[]; + makeBlock( + type: string, + startLine?: number | null, + endLine?: number | 
null, + label?: string | null, + ): CfgBlockInternal; + addEdge(source: CfgBlockInternal, target: CfgBlockInternal, kind: string): void; + entryBlock: CfgBlockInternal; + exitBlock: CfgBlockInternal; + currentBlock: CfgBlockInternal | null; + loopStack: LoopCtx[]; + labelMap: Map; + cfgStack: FuncState[]; + funcNode: TreeSitterNode | null; +} + +interface CFGResultInternal { + funcNode: TreeSitterNode; + blocks: CfgBlockInternal[]; + edges: CfgEdgeInternal[]; + cyclomatic: number; +} + +function isIfNode(type: string, cfgRules: AnyRules): boolean { return type === cfgRules.ifNode || cfgRules.ifNodes?.has(type); } -function isForNode(type, cfgRules) { +function isForNode(type: string, cfgRules: AnyRules): boolean { return cfgRules.forNodes.has(type); } -function isWhileNode(type, cfgRules) { +function isWhileNode(type: string, cfgRules: AnyRules): boolean { return type === cfgRules.whileNode || cfgRules.whileNodes?.has(type); } -function isSwitchNode(type, cfgRules) { +function isSwitchNode(type: string, cfgRules: AnyRules): boolean { return type === cfgRules.switchNode || cfgRules.switchNodes?.has(type); } -function isCaseNode(type, cfgRules) { +function isCaseNode(type: string, cfgRules: AnyRules): boolean { return ( type === cfgRules.caseNode || type === cfgRules.defaultNode || cfgRules.caseNodes?.has(type) ); } -function isBlockNode(type, cfgRules) { +function isBlockNode(type: string, cfgRules: AnyRules): boolean { return type === 'statement_list' || type === cfgRules.blockNode || cfgRules.blockNodes?.has(type); } -/** Check if a node is a control-flow statement that we handle specially */ -function isControlFlow(type, cfgRules) { +function isControlFlow(type: string, cfgRules: AnyRules): boolean { return ( isIfNode(type, cfgRules) || (cfgRules.unlessNode && type === cfgRules.unlessNode) || @@ -58,24 +102,20 @@ function isControlFlow(type, cfgRules) { ); } -// ── Utility functions ─────────────────────────────────────────────────── - -/** - * Get the actual 
control-flow node (unwrapping expression_statement if needed). - */ -function effectiveNode(node, cfgRules) { +function effectiveNode(node: TreeSitterNode, cfgRules: AnyRules): TreeSitterNode { if (node.type === 'expression_statement' && node.namedChildCount === 1) { - const inner = node.namedChild(0); + const inner = nn(node.namedChild(0)); if (isControlFlow(inner.type, cfgRules)) return inner; } return node; } -/** - * Register a loop/switch in label map for labeled break/continue. - */ -function registerLabelCtx(S, headerBlock, exitBlock) { - for (const [, ctx] of S.labelMap) { +function registerLabelCtx( + S: FuncState, + headerBlock: CfgBlockInternal, + exitBlock: CfgBlockInternal, +): void { + for (const [, ctx] of Array.from(S.labelMap)) { if (!ctx.headerBlock) { ctx.headerBlock = headerBlock; ctx.exitBlock = exitBlock; @@ -83,19 +123,15 @@ function registerLabelCtx(S, headerBlock, exitBlock) { } } -/** - * Get statements from a body node (block or single statement). - * Returns effective (unwrapped) nodes. 
- */ -function getBodyStatements(bodyNode, cfgRules) { +function getBodyStatements(bodyNode: TreeSitterNode | null, cfgRules: AnyRules): TreeSitterNode[] { if (!bodyNode) return []; if (isBlockNode(bodyNode.type, cfgRules)) { - const stmts = []; + const stmts: TreeSitterNode[] = []; for (let i = 0; i < bodyNode.namedChildCount; i++) { - const child = bodyNode.namedChild(i); + const child = nn(bodyNode.namedChild(i)); if (child.type === 'statement_list') { for (let j = 0; j < child.namedChildCount; j++) { - stmts.push(child.namedChild(j)); + stmts.push(nn(child.namedChild(j))); } } else { stmts.push(child); @@ -106,18 +142,23 @@ function getBodyStatements(bodyNode, cfgRules) { return [bodyNode]; } -function makeFuncState() { - const blocks = []; - const edges = []; +function makeFuncState(): FuncState { + const blocks: CfgBlockInternal[] = []; + const edges: CfgEdgeInternal[] = []; let nextIndex = 0; - function makeBlock(type, startLine = null, endLine = null, label = null) { - const block = { index: nextIndex++, type, startLine, endLine, label }; + function makeBlock( + type: string, + startLine: number | null = null, + endLine: number | null = null, + label: string | null = null, + ): CfgBlockInternal { + const block: CfgBlockInternal = { index: nextIndex++, type, startLine, endLine, label }; blocks.push(block); return block; } - function addEdge(source, target, kind) { + function addEdge(source: CfgBlockInternal, target: CfgBlockInternal, kind: string): void { edges.push({ sourceIndex: source.index, targetIndex: target.index, kind }); } @@ -141,10 +182,13 @@ function makeFuncState() { }; } -// ── Statement processors ──────────────────────────────────────────────── - -function processStatements(stmts, currentBlock, S, cfgRules) { - let cur = currentBlock; +function processStatements( + stmts: TreeSitterNode[], + currentBlock: CfgBlockInternal, + S: FuncState, + cfgRules: AnyRules, +): CfgBlockInternal | null { + let cur: CfgBlockInternal | null = currentBlock; 
for (const stmt of stmts) { if (!cur) break; cur = processStatement(stmt, cur, S, cfgRules); @@ -152,7 +196,12 @@ function processStatements(stmts, currentBlock, S, cfgRules) { return cur; } -function processStatement(stmt, currentBlock, S, cfgRules) { +function processStatement( + stmt: TreeSitterNode, + currentBlock: CfgBlockInternal, + S: FuncState, + cfgRules: AnyRules, +): CfgBlockInternal | null { if (!stmt || !currentBlock) return currentBlock; const effNode = effectiveNode(stmt, cfgRules); @@ -199,7 +248,6 @@ function processStatement(stmt, currentBlock, S, cfgRules) { return processContinue(effNode, currentBlock, S); } - // Regular statement — extend current block if (!currentBlock.startLine) { currentBlock.startLine = stmt.startPosition.row + 1; } @@ -207,14 +255,17 @@ function processStatement(stmt, currentBlock, S, cfgRules) { return currentBlock; } -// ── Labeled / break / continue ────────────────────────────────────────── - -function processLabeled(node, currentBlock, S, cfgRules) { +function processLabeled( + node: TreeSitterNode, + currentBlock: CfgBlockInternal, + S: FuncState, + cfgRules: AnyRules, +): CfgBlockInternal | null { const labelNode = node.childForFieldName('label'); const labelName = labelNode ? labelNode.text : null; const body = node.childForFieldName('body'); if (body && labelName) { - const labelCtx = { headerBlock: null, exitBlock: null }; + const labelCtx: LabelCtx = { headerBlock: null, exitBlock: null }; S.labelMap.set(labelName, labelCtx); const result = processStatement(body, currentBlock, S, cfgRules); S.labelMap.delete(labelName); @@ -223,15 +274,19 @@ function processLabeled(node, currentBlock, S, cfgRules) { return currentBlock; } -function processBreak(node, currentBlock, S) { +function processBreak( + node: TreeSitterNode, + currentBlock: CfgBlockInternal, + S: FuncState, +): CfgBlockInternal | null { const labelNode = node.childForFieldName('label'); const labelName = labelNode ? 
labelNode.text : null; - let target = null; + let target: CfgBlockInternal | null = null; if (labelName && S.labelMap.has(labelName)) { - target = S.labelMap.get(labelName).exitBlock; + target = (S.labelMap.get(labelName) as LabelCtx).exitBlock; } else if (S.loopStack.length > 0) { - target = S.loopStack[S.loopStack.length - 1].exitBlock; + target = S.loopStack[S.loopStack.length - 1]!.exitBlock; } if (target) { @@ -242,15 +297,19 @@ function processBreak(node, currentBlock, S) { return currentBlock; } -function processContinue(node, currentBlock, S) { +function processContinue( + node: TreeSitterNode, + currentBlock: CfgBlockInternal, + S: FuncState, +): CfgBlockInternal | null { const labelNode = node.childForFieldName('label'); const labelName = labelNode ? labelNode.text : null; - let target = null; + let target: CfgBlockInternal | null = null; if (labelName && S.labelMap.has(labelName)) { - target = S.labelMap.get(labelName).headerBlock; + target = (S.labelMap.get(labelName) as LabelCtx).headerBlock; } else if (S.loopStack.length > 0) { - target = S.loopStack[S.loopStack.length - 1].headerBlock; + target = S.loopStack[S.loopStack.length - 1]!.headerBlock; } if (target) { @@ -261,9 +320,12 @@ function processContinue(node, currentBlock, S) { return currentBlock; } -// ── If / else-if / else ───────────────────────────────────────────────── - -function processIf(ifStmt, currentBlock, S, cfgRules) { +function processIf( + ifStmt: TreeSitterNode, + currentBlock: CfgBlockInternal, + S: FuncState, + cfgRules: AnyRules, +): CfgBlockInternal { currentBlock.endLine = ifStmt.startPosition.row + 1; const condBlock = S.makeBlock( @@ -276,7 +338,6 @@ function processIf(ifStmt, currentBlock, S, cfgRules) { const joinBlock = S.makeBlock('body'); - // True branch const consequentField = cfgRules.ifConsequentField || 'consequence'; const consequent = ifStmt.childForFieldName(consequentField); const trueBlock = S.makeBlock('branch_true', null, null, 'then'); @@ -287,7 +348,6 @@ 
function processIf(ifStmt, currentBlock, S, cfgRules) { S.addEdge(trueEnd, joinBlock, 'fallthrough'); } - // False branch if (cfgRules.elifNode) { processElifSiblings(ifStmt, condBlock, joinBlock, S, cfgRules); } else { @@ -297,7 +357,13 @@ function processIf(ifStmt, currentBlock, S, cfgRules) { return joinBlock; } -function processAlternative(ifStmt, condBlock, joinBlock, S, cfgRules) { +function processAlternative( + ifStmt: TreeSitterNode, + condBlock: CfgBlockInternal, + joinBlock: CfgBlockInternal, + S: FuncState, + cfgRules: AnyRules, +): void { const alternative = ifStmt.childForFieldName('alternative'); if (!alternative) { S.addEdge(condBlock, joinBlock, 'branch_false'); @@ -305,7 +371,6 @@ function processAlternative(ifStmt, condBlock, joinBlock, S, cfgRules) { } if (cfgRules.elseViaAlternative && alternative.type !== cfgRules.elseClause) { - // Pattern C: direct alternative (Go, Java, C#) if (isIfNode(alternative.type, cfgRules)) { const falseBlock = S.makeBlock('branch_false', null, null, 'else-if'); S.addEdge(condBlock, falseBlock, 'branch_false'); @@ -319,15 +384,14 @@ function processAlternative(ifStmt, condBlock, joinBlock, S, cfgRules) { if (falseEnd) S.addEdge(falseEnd, joinBlock, 'fallthrough'); } } else if (alternative.type === cfgRules.elseClause) { - // Pattern A: else_clause wrapper (JS/TS, Rust) - const elseChildren = []; + const elseChildren: TreeSitterNode[] = []; for (let i = 0; i < alternative.namedChildCount; i++) { - elseChildren.push(alternative.namedChild(i)); + elseChildren.push(nn(alternative.namedChild(i))); } - if (elseChildren.length === 1 && isIfNode(elseChildren[0].type, cfgRules)) { + if (elseChildren.length === 1 && isIfNode(elseChildren[0]!.type, cfgRules)) { const falseBlock = S.makeBlock('branch_false', null, null, 'else-if'); S.addEdge(condBlock, falseBlock, 'branch_false'); - const elseIfEnd = processIf(elseChildren[0], falseBlock, S, cfgRules); + const elseIfEnd = processIf(elseChildren[0]!, falseBlock, S, cfgRules); if 
(elseIfEnd) S.addEdge(elseIfEnd, joinBlock, 'fallthrough'); } else { const falseBlock = S.makeBlock('branch_false', null, null, 'else'); @@ -338,12 +402,18 @@ function processAlternative(ifStmt, condBlock, joinBlock, S, cfgRules) { } } -function processElifSiblings(ifStmt, firstCondBlock, joinBlock, S, cfgRules) { +function processElifSiblings( + ifStmt: TreeSitterNode, + firstCondBlock: CfgBlockInternal, + joinBlock: CfgBlockInternal, + S: FuncState, + cfgRules: AnyRules, +): void { let lastCondBlock = firstCondBlock; let foundElse = false; for (let i = 0; i < ifStmt.namedChildCount; i++) { - const child = ifStmt.namedChild(i); + const child = nn(ifStmt.namedChild(i)); if (child.type === cfgRules.elifNode) { const elifCondBlock = S.makeBlock( @@ -368,13 +438,13 @@ function processElifSiblings(ifStmt, firstCondBlock, joinBlock, S, cfgRules) { S.addEdge(lastCondBlock, elseBlock, 'branch_false'); const elseBody = child.childForFieldName('body'); - let elseStmts; + let elseStmts: TreeSitterNode[]; if (elseBody) { elseStmts = getBodyStatements(elseBody, cfgRules); } else { elseStmts = []; for (let j = 0; j < child.namedChildCount; j++) { - elseStmts.push(child.namedChild(j)); + elseStmts.push(nn(child.namedChild(j))); } } const elseEnd = processStatements(elseStmts, elseBlock, S, cfgRules); @@ -389,9 +459,12 @@ function processElifSiblings(ifStmt, firstCondBlock, joinBlock, S, cfgRules) { } } -// ── Loops ─────────────────────────────────────────────────────────────── - -function processForLoop(forStmt, currentBlock, S, cfgRules) { +function processForLoop( + forStmt: TreeSitterNode, + currentBlock: CfgBlockInternal, + S: FuncState, + cfgRules: AnyRules, +): CfgBlockInternal { const headerBlock = S.makeBlock( 'loop_header', forStmt.startPosition.row + 1, @@ -401,7 +474,7 @@ function processForLoop(forStmt, currentBlock, S, cfgRules) { S.addEdge(currentBlock, headerBlock, 'fallthrough'); const loopExitBlock = S.makeBlock('body'); - const loopCtx = { headerBlock, 
exitBlock: loopExitBlock }; + const loopCtx: LoopCtx = { headerBlock, exitBlock: loopExitBlock }; S.loopStack.push(loopCtx); registerLabelCtx(S, headerBlock, loopExitBlock); @@ -418,7 +491,12 @@ function processForLoop(forStmt, currentBlock, S, cfgRules) { return loopExitBlock; } -function processWhileLoop(whileStmt, currentBlock, S, cfgRules) { +function processWhileLoop( + whileStmt: TreeSitterNode, + currentBlock: CfgBlockInternal, + S: FuncState, + cfgRules: AnyRules, +): CfgBlockInternal { const headerBlock = S.makeBlock( 'loop_header', whileStmt.startPosition.row + 1, @@ -428,7 +506,7 @@ function processWhileLoop(whileStmt, currentBlock, S, cfgRules) { S.addEdge(currentBlock, headerBlock, 'fallthrough'); const loopExitBlock = S.makeBlock('body'); - const loopCtx = { headerBlock, exitBlock: loopExitBlock }; + const loopCtx: LoopCtx = { headerBlock, exitBlock: loopExitBlock }; S.loopStack.push(loopCtx); registerLabelCtx(S, headerBlock, loopExitBlock); @@ -445,14 +523,19 @@ function processWhileLoop(whileStmt, currentBlock, S, cfgRules) { return loopExitBlock; } -function processDoWhileLoop(doStmt, currentBlock, S, cfgRules) { +function processDoWhileLoop( + doStmt: TreeSitterNode, + currentBlock: CfgBlockInternal, + S: FuncState, + cfgRules: AnyRules, +): CfgBlockInternal { const bodyBlock = S.makeBlock('loop_body', doStmt.startPosition.row + 1, null, 'do'); S.addEdge(currentBlock, bodyBlock, 'fallthrough'); const condBlock = S.makeBlock('loop_header', null, null, 'do-while'); const loopExitBlock = S.makeBlock('body'); - const loopCtx = { headerBlock: condBlock, exitBlock: loopExitBlock }; + const loopCtx: LoopCtx = { headerBlock: condBlock, exitBlock: loopExitBlock }; S.loopStack.push(loopCtx); registerLabelCtx(S, condBlock, loopExitBlock); @@ -468,7 +551,12 @@ function processDoWhileLoop(doStmt, currentBlock, S, cfgRules) { return loopExitBlock; } -function processInfiniteLoop(loopStmt, currentBlock, S, cfgRules) { +function processInfiniteLoop( + loopStmt: 
TreeSitterNode, + currentBlock: CfgBlockInternal, + S: FuncState, + cfgRules: AnyRules, +): CfgBlockInternal { const headerBlock = S.makeBlock( 'loop_header', loopStmt.startPosition.row + 1, @@ -478,7 +566,7 @@ function processInfiniteLoop(loopStmt, currentBlock, S, cfgRules) { S.addEdge(currentBlock, headerBlock, 'fallthrough'); const loopExitBlock = S.makeBlock('body'); - const loopCtx = { headerBlock, exitBlock: loopExitBlock }; + const loopCtx: LoopCtx = { headerBlock, exitBlock: loopExitBlock }; S.loopStack.push(loopCtx); registerLabelCtx(S, headerBlock, loopExitBlock); @@ -490,14 +578,16 @@ function processInfiniteLoop(loopStmt, currentBlock, S, cfgRules) { const bodyEnd = processStatements(bodyStmts, bodyBlock, S, cfgRules); if (bodyEnd) S.addEdge(bodyEnd, headerBlock, 'loop_back'); - // No loop_exit from header — only via break S.loopStack.pop(); return loopExitBlock; } -// ── Switch / match ────────────────────────────────────────────────────── - -function processSwitch(switchStmt, currentBlock, S, cfgRules) { +function processSwitch( + switchStmt: TreeSitterNode, + currentBlock: CfgBlockInternal, + S: FuncState, + cfgRules: AnyRules, +): CfgBlockInternal { currentBlock.endLine = switchStmt.startPosition.row + 1; const switchHeader = S.makeBlock( @@ -509,7 +599,7 @@ function processSwitch(switchStmt, currentBlock, S, cfgRules) { S.addEdge(currentBlock, switchHeader, 'fallthrough'); const joinBlock = S.makeBlock('body'); - const switchCtx = { headerBlock: switchHeader, exitBlock: joinBlock }; + const switchCtx: LoopCtx = { headerBlock: switchHeader, exitBlock: joinBlock }; S.loopStack.push(switchCtx); const switchBody = switchStmt.childForFieldName('body'); @@ -517,7 +607,7 @@ function processSwitch(switchStmt, currentBlock, S, cfgRules) { let hasDefault = false; for (let i = 0; i < container.namedChildCount; i++) { - const caseClause = container.namedChild(i); + const caseClause = nn(container.namedChild(i)); const isDefault = caseClause.type === 
cfgRules.defaultNode; const isCase = isDefault || isCaseNode(caseClause.type, cfgRules); @@ -541,22 +631,22 @@ function processSwitch(switchStmt, currentBlock, S, cfgRules) { return joinBlock; } -function extractCaseBody(caseClause, cfgRules) { +function extractCaseBody(caseClause: TreeSitterNode, cfgRules: AnyRules): TreeSitterNode[] { const caseBodyNode = caseClause.childForFieldName('body') || caseClause.childForFieldName('consequence'); if (caseBodyNode) { return getBodyStatements(caseBodyNode, cfgRules); } - const stmts = []; + const stmts: TreeSitterNode[] = []; const valueNode = caseClause.childForFieldName('value'); const patternNode = caseClause.childForFieldName('pattern'); for (let j = 0; j < caseClause.namedChildCount; j++) { - const child = caseClause.namedChild(j); + const child = nn(caseClause.namedChild(j)); if (child !== valueNode && child !== patternNode && child.type !== 'switch_label') { if (child.type === 'statement_list') { for (let k = 0; k < child.namedChildCount; k++) { - stmts.push(child.namedChild(k)); + stmts.push(nn(child.namedChild(k))); } } else { stmts.push(child); @@ -566,17 +656,19 @@ function extractCaseBody(caseClause, cfgRules) { return stmts; } -// ── Try / catch / finally ─────────────────────────────────────────────── - -function processTryCatch(tryStmt, currentBlock, S, cfgRules) { +function processTryCatch( + tryStmt: TreeSitterNode, + currentBlock: CfgBlockInternal, + S: FuncState, + cfgRules: AnyRules, +): CfgBlockInternal { currentBlock.endLine = tryStmt.startPosition.row + 1; const joinBlock = S.makeBlock('body'); - // Try body const tryBody = tryStmt.childForFieldName('body'); - let tryBodyStart; - let tryStmts; + let tryBodyStart: number; + let tryStmts: TreeSitterNode[]; if (tryBody) { tryBodyStart = tryBody.startPosition.row + 1; tryStmts = getBodyStatements(tryBody, cfgRules); @@ -584,7 +676,7 @@ function processTryCatch(tryStmt, currentBlock, S, cfgRules) { tryBodyStart = tryStmt.startPosition.row + 1; tryStmts = 
[]; for (let i = 0; i < tryStmt.namedChildCount; i++) { - const child = tryStmt.namedChild(i); + const child = nn(tryStmt.namedChild(i)); if (cfgRules.catchNode && child.type === cfgRules.catchNode) continue; if (cfgRules.finallyNode && child.type === cfgRules.finallyNode) continue; tryStmts.push(child); @@ -595,7 +687,6 @@ function processTryCatch(tryStmt, currentBlock, S, cfgRules) { S.addEdge(currentBlock, tryBlock, 'fallthrough'); const tryEnd = processStatements(tryStmts, tryBlock, S, cfgRules); - // Find catch and finally handlers const { catchHandler, finallyHandler } = findTryHandlers(tryStmt, cfgRules); if (catchHandler) { @@ -609,11 +700,14 @@ function processTryCatch(tryStmt, currentBlock, S, cfgRules) { return joinBlock; } -function findTryHandlers(tryStmt, cfgRules) { - let catchHandler = null; - let finallyHandler = null; +function findTryHandlers( + tryStmt: TreeSitterNode, + cfgRules: AnyRules, +): { catchHandler: TreeSitterNode | null; finallyHandler: TreeSitterNode | null } { + let catchHandler: TreeSitterNode | null = null; + let finallyHandler: TreeSitterNode | null = null; for (let i = 0; i < tryStmt.namedChildCount; i++) { - const child = tryStmt.namedChild(i); + const child = nn(tryStmt.namedChild(i)); if (cfgRules.catchNode && child.type === cfgRules.catchNode) catchHandler = child; if (cfgRules.finallyNode && child.type === cfgRules.finallyNode) finallyHandler = child; } @@ -621,25 +715,25 @@ function findTryHandlers(tryStmt, cfgRules) { } function processCatchHandler( - catchHandler, - tryBlock, - tryEnd, - finallyHandler, - joinBlock, - S, - cfgRules, -) { + catchHandler: TreeSitterNode, + tryBlock: CfgBlockInternal, + tryEnd: CfgBlockInternal | null, + finallyHandler: TreeSitterNode | null, + joinBlock: CfgBlockInternal, + S: FuncState, + cfgRules: AnyRules, +): void { const catchBlock = S.makeBlock('catch', catchHandler.startPosition.row + 1, null, 'catch'); S.addEdge(tryBlock, catchBlock, 'exception'); const catchBodyNode = 
catchHandler.childForFieldName('body'); - let catchStmts; + let catchStmts: TreeSitterNode[]; if (catchBodyNode) { catchStmts = getBodyStatements(catchBodyNode, cfgRules); } else { catchStmts = []; for (let i = 0; i < catchHandler.namedChildCount; i++) { - catchStmts.push(catchHandler.namedChild(i)); + catchStmts.push(nn(catchHandler.namedChild(i))); } } const catchEnd = processStatements(catchStmts, catchBlock, S, cfgRules); @@ -666,7 +760,13 @@ function processCatchHandler( } } -function processFinallyOnly(finallyHandler, tryEnd, joinBlock, S, cfgRules) { +function processFinallyOnly( + finallyHandler: TreeSitterNode, + tryEnd: CfgBlockInternal | null, + joinBlock: CfgBlockInternal, + S: FuncState, + cfgRules: AnyRules, +): void { const finallyBlock = S.makeBlock( 'finally', finallyHandler.startPosition.row + 1, @@ -683,12 +783,9 @@ function processFinallyOnly(finallyHandler, tryEnd, joinBlock, S, cfgRules) { if (finallyEnd) S.addEdge(finallyEnd, joinBlock, 'fallthrough'); } -// ── Enter-function body processing ────────────────────────────────────── - -function processFunctionBody(funcNode, S, cfgRules) { +function processFunctionBody(funcNode: TreeSitterNode, S: FuncState, cfgRules: AnyRules): void { const body = funcNode.childForFieldName('body'); if (!body) { - // No body — entry → exit S.blocks.length = 2; S.edges.length = 0; S.addEdge(S.entryBlock, S.exitBlock, 'fallthrough'); @@ -697,8 +794,7 @@ function processFunctionBody(funcNode, S, cfgRules) { } if (!isBlockNode(body.type, cfgRules)) { - // Expression body (e.g., arrow function `(x) => x + 1`) - const bodyBlock = S.blocks[2]; + const bodyBlock = S.blocks[2]!; bodyBlock.startLine = body.startPosition.row + 1; bodyBlock.endLine = body.endPosition.row + 1; S.addEdge(bodyBlock, S.exitBlock, 'fallthrough'); @@ -706,7 +802,6 @@ function processFunctionBody(funcNode, S, cfgRules) { return; } - // Block body — process statements const stmts = getBodyStatements(body, cfgRules); if (stmts.length === 0) { 
S.blocks.length = 2; @@ -716,7 +811,7 @@ function processFunctionBody(funcNode, S, cfgRules) { return; } - const firstBody = S.blocks[2]; + const firstBody = S.blocks[2]!; const lastBlock = processStatements(stmts, firstBody, S, cfgRules); if (lastBlock) { S.addEdge(lastBlock, S.exitBlock, 'fallthrough'); @@ -724,54 +819,52 @@ function processFunctionBody(funcNode, S, cfgRules) { S.currentBlock = null; } -// ── Visitor factory ───────────────────────────────────────────────────── - -/** - * Create a CFG visitor for use with walkWithVisitors. - * - * @param {object} cfgRules - CFG_RULES for the language - * @returns {Visitor} - */ -export function createCfgVisitor(cfgRules) { - const funcStateStack = []; - let S = null; - const results = []; +export function createCfgVisitor(cfgRules: AnyRules): Visitor { + const funcStateStack: FuncState[] = []; + let S: FuncState | null = null; + const results: CFGResultInternal[] = []; return { name: 'cfg', functionNodeTypes: cfgRules.functionNodes, - enterFunction(funcNode, _funcName, _context) { + enterFunction( + funcNode: TreeSitterNode, + _funcName: string | null, + _context: VisitorContext, + ): void { if (S) funcStateStack.push(S); S = makeFuncState(); S.funcNode = funcNode; processFunctionBody(funcNode, S, cfgRules); }, - exitFunction(funcNode, _funcName, _context) { + exitFunction( + funcNode: TreeSitterNode, + _funcName: string | null, + _context: VisitorContext, + ): void { if (S && S.funcNode === funcNode) { const cyclomatic = S.edges.length - S.blocks.length + 2; results.push({ - funcNode: S.funcNode, + funcNode: S.funcNode as TreeSitterNode, blocks: S.blocks, edges: S.edges, cyclomatic: Math.max(cyclomatic, 1), }); } - S = funcStateStack.length > 0 ? funcStateStack.pop() : null; + S = funcStateStack.length > 0 ? (funcStateStack.pop() as FuncState) : null; }, - enterNode(_node, _context) { - // No-op — all CFG construction is done in enterFunction via processStatements. 
- // We intentionally do NOT return skipChildren so the walker recurses into - // children, allowing nested functions to trigger enterFunction/exitFunction. + enterNode(_node: TreeSitterNode, _context: VisitorContext): undefined { + // No-op }, - exitNode(_node, _context) { + exitNode(_node: TreeSitterNode, _context: VisitorContext): void { // No-op }, - finish() { + finish(): CFGResultInternal[] { return results; }, }; diff --git a/src/ast-analysis/visitors/complexity-visitor.js b/src/ast-analysis/visitors/complexity-visitor.ts similarity index 57% rename from src/ast-analysis/visitors/complexity-visitor.js rename to src/ast-analysis/visitors/complexity-visitor.ts index ca19c0c5..6dcc9996 100644 --- a/src/ast-analysis/visitors/complexity-visitor.js +++ b/src/ast-analysis/visitors/complexity-visitor.ts @@ -1,65 +1,75 @@ -/** - * Visitor: Compute cognitive/cyclomatic complexity, max nesting, and Halstead metrics. - * - * Replaces the computeAllMetrics() DFS walk in complexity.js with a visitor that - * plugs into the unified walkWithVisitors framework. Operates per-function: - * resets accumulators on enterFunction, emits results on exitFunction. 
- */ - +import type { EnterNodeResult, TreeSitterNode, Visitor, VisitorContext } from '../../types.js'; import { computeHalsteadDerived, computeLOCMetrics, computeMaintainabilityIndex, } from '../metrics.js'; -// ── Halstead classification ───────────────────────────────────────────── +// biome-ignore lint/suspicious/noExplicitAny: complexity/halstead rules are opaque language-specific objects +type AnyRules = any; + +interface ComplexityAcc { + cognitive: number; + cyclomatic: number; + maxNesting: number; + operators: Map | null; + operands: Map | null; + halsteadSkipDepth: number; +} + +interface PerFunctionResult { + funcNode: TreeSitterNode; + funcName: string | null; + metrics: ReturnType; +} -function classifyHalstead(node, hRules, acc) { +function classifyHalstead(node: TreeSitterNode, hRules: AnyRules, acc: ComplexityAcc): void { const type = node.type; if (hRules.skipTypes.has(type)) acc.halsteadSkipDepth++; if (acc.halsteadSkipDepth > 0) return; - if (hRules.compoundOperators.has(type)) { + if (hRules.compoundOperators.has(type) && acc.operators) { acc.operators.set(type, (acc.operators.get(type) || 0) + 1); } if (node.childCount === 0) { - if (hRules.operatorLeafTypes.has(type)) { + if (hRules.operatorLeafTypes.has(type) && acc.operators) { acc.operators.set(type, (acc.operators.get(type) || 0) + 1); - } else if (hRules.operandLeafTypes.has(type)) { + } else if (hRules.operandLeafTypes.has(type) && acc.operands) { const text = node.text; acc.operands.set(text, (acc.operands.get(text) || 0) + 1); } } } -// ── Branch complexity classification ──────────────────────────────────── - -function classifyBranchNode(node, type, nestingLevel, cRules, acc) { - // Pattern A: else clause wraps if (JS/C#/Rust) +function classifyBranchNode( + node: TreeSitterNode, + type: string, + nestingLevel: number, + cRules: AnyRules, + acc: ComplexityAcc, +): void { if (cRules.elseNodeType && type === cRules.elseNodeType) { const firstChild = node.namedChild(0); if (firstChild 
&& firstChild.type === cRules.ifNodeType) { - // else-if: the if_statement child handles its own increment return; } acc.cognitive++; return; } - // Pattern B: explicit elif node (Python/Ruby/PHP) if (cRules.elifNodeType && type === cRules.elifNodeType) { acc.cognitive++; acc.cyclomatic++; return; } - // Detect else-if via Pattern A or C let isElseIf = false; if (type === cRules.ifNodeType) { if (cRules.elseViaAlternative) { isElseIf = node.parent?.type === cRules.ifNodeType && - node.parent.childForFieldName('alternative')?.id === node.id; + // biome-ignore lint/suspicious/noExplicitAny: tree-sitter node.id exists at runtime but not in our interface + (node.parent!.childForFieldName('alternative') as any)?.id === (node as any).id; } else if (cRules.elseNodeType) { isElseIf = node.parent?.type === cRules.elseNodeType; } @@ -71,7 +81,6 @@ function classifyBranchNode(node, type, nestingLevel, cRules, acc) { return; } - // Regular branch node acc.cognitive += 1 + nestingLevel; acc.cyclomatic++; @@ -80,27 +89,41 @@ function classifyBranchNode(node, type, nestingLevel, cRules, acc) { } } -// ── Plain-else detection (Pattern C: Go/Java) ────────────────────────── - -function classifyPlainElse(node, type, cRules, acc) { +function classifyPlainElse( + node: TreeSitterNode, + type: string, + cRules: AnyRules, + acc: ComplexityAcc, +): void { if ( cRules.elseViaAlternative && type !== cRules.ifNodeType && node.parent?.type === cRules.ifNodeType && - node.parent.childForFieldName('alternative')?.id === node.id + // biome-ignore lint/suspicious/noExplicitAny: tree-sitter node.id exists at runtime but not in our interface + (node.parent!.childForFieldName('alternative') as any)?.id === (node as any).id ) { acc.cognitive++; } } -// ── Result collection ─────────────────────────────────────────────────── - -function collectResult(funcNode, acc, hRules, langId) { +function collectResult( + funcNode: TreeSitterNode | { text: string }, + acc: ComplexityAcc, + hRules: AnyRules | null 
| undefined, + langId: string | null, +): { + cognitive: number; + cyclomatic: number; + maxNesting: number; + halstead: ReturnType | null; + loc: ReturnType; + mi: number; +} { const halstead = hRules && acc.operators && acc.operands ? computeHalsteadDerived(acc.operators, acc.operands) : null; - const loc = computeLOCMetrics(funcNode, langId); + const loc = computeLOCMetrics(funcNode as TreeSitterNode, langId ?? undefined); const volume = halstead ? halstead.volume : 0; const commentRatio = loc.loc > 0 ? loc.commentLines / loc.loc : 0; const mi = computeMaintainabilityIndex(volume, acc.cyclomatic, loc.sloc, commentRatio); @@ -115,7 +138,7 @@ function collectResult(funcNode, acc, hRules, langId) { }; } -function resetAccumulators(hRules) { +function resetAccumulators(hRules: AnyRules | null | undefined): ComplexityAcc { return { cognitive: 0, cyclomatic: 1, @@ -126,35 +149,28 @@ function resetAccumulators(hRules) { }; } -// ── Visitor factory ───────────────────────────────────────────────────── - -/** - * Create a complexity visitor for use with walkWithVisitors. - * - * When used in file-level mode (walking an entire file), this visitor collects - * per-function metrics using enterFunction/exitFunction hooks. When used in - * function-level mode (walking a single function node), it collects one result. 
- * - * @param {object} cRules - COMPLEXITY_RULES for the language - * @param {object} [hRules] - HALSTEAD_RULES for the language (null if unavailable) - * @param {object} [options] - * @param {boolean} [options.fileLevelWalk=false] - true when walking an entire file - * @returns {Visitor} - */ -export function createComplexityVisitor(cRules, hRules, options = {}) { +export function createComplexityVisitor( + cRules: AnyRules, + hRules?: AnyRules | null, + options: { fileLevelWalk?: boolean; langId?: string | null } = {}, +): Visitor { const { fileLevelWalk = false, langId = null } = options; let acc = resetAccumulators(hRules); - let activeFuncNode = null; - let activeFuncName = null; + let activeFuncNode: TreeSitterNode | null = null; + let activeFuncName: string | null = null; let funcDepth = 0; - const results = []; + const results: PerFunctionResult[] = []; return { name: 'complexity', functionNodeTypes: cRules.functionNodes, - enterFunction(funcNode, funcName, _context) { + enterFunction( + funcNode: TreeSitterNode, + funcName: string | null, + _context: VisitorContext, + ): void { if (fileLevelWalk) { if (!activeFuncNode) { acc = resetAccumulators(hRules); @@ -169,7 +185,11 @@ export function createComplexityVisitor(cRules, hRules, options = {}) { } }, - exitFunction(funcNode, _funcName, _context) { + exitFunction( + funcNode: TreeSitterNode, + _funcName: string | null, + _context: VisitorContext, + ): void { if (fileLevelWalk) { if (funcNode === activeFuncNode) { results.push({ @@ -187,7 +207,7 @@ export function createComplexityVisitor(cRules, hRules, options = {}) { } }, - enterNode(node, context) { + enterNode(node: TreeSitterNode, context: VisitorContext): EnterNodeResult | undefined { if (fileLevelWalk && !activeFuncNode) return; const type = node.type; @@ -197,7 +217,6 @@ export function createComplexityVisitor(cRules, hRules, options = {}) { if (nestingLevel > acc.maxNesting) acc.maxNesting = nestingLevel; - // Logical operators in binary expressions 
if (type === cRules.logicalNodeType) { const op = node.child(1)?.type; if (op && cRules.logicalOperators.has(op)) { @@ -212,28 +231,24 @@ export function createComplexityVisitor(cRules, hRules, options = {}) { } } - // Optional chaining (cyclomatic only) if (type === cRules.optionalChainType) acc.cyclomatic++; - // Branch/control flow nodes (skip keyword leaf tokens) if (cRules.branchNodes.has(type) && node.childCount > 0) { classifyBranchNode(node, type, nestingLevel, cRules, acc); } - // Pattern C plain else (Go/Java) classifyPlainElse(node, type, cRules, acc); - // Case nodes (cyclomatic only, skip keyword leaves) if (cRules.caseNodes.has(type) && node.childCount > 0) acc.cyclomatic++; }, - exitNode(node) { + exitNode(node: TreeSitterNode): void { if (hRules?.skipTypes.has(node.type)) acc.halsteadSkipDepth--; }, - finish() { + finish(): PerFunctionResult[] | ReturnType { if (fileLevelWalk) return results; - return collectResult({ text: '' }, acc, hRules, langId); + return collectResult({ text: '' } as TreeSitterNode, acc, hRules, langId); }, }; } diff --git a/src/ast-analysis/visitors/dataflow-visitor.js b/src/ast-analysis/visitors/dataflow-visitor.ts similarity index 65% rename from src/ast-analysis/visitors/dataflow-visitor.js rename to src/ast-analysis/visitors/dataflow-visitor.ts index 644490be..72ba64db 100644 --- a/src/ast-analysis/visitors/dataflow-visitor.js +++ b/src/ast-analysis/visitors/dataflow-visitor.ts @@ -1,15 +1,4 @@ -/** - * Visitor: Extract dataflow information (define-use chains, arg flows, mutations). - * - * Replaces the standalone extractDataflow() visit logic in dataflow.js with a - * visitor that plugs into the unified walkWithVisitors framework. - * - * NOTE: The original dataflow walk uses `node.namedChildren` while the visitor - * framework uses `node.child(i)` (all children). 
This visitor handles both - * named and unnamed children correctly since the classification logic only - * cares about specific node types/fields, not about traversal order. - */ - +import type { EnterNodeResult, TreeSitterNode, Visitor, VisitorContext } from '../../types.js'; import { collectIdentifiers, extractParamNames, @@ -21,24 +10,92 @@ import { truncate, } from '../visitor-utils.js'; -// ── Scope helpers ─────────────────────────────────────────────────────── +// biome-ignore lint/suspicious/noExplicitAny: dataflow rules are opaque language-specific objects +type AnyRules = any; + +interface ScopeEntry { + funcName: string | null; + funcNode: TreeSitterNode; + params: Map; + locals: Map; +} + +interface Binding { + type: string; + index?: number; + source?: { type: string; callee?: string } | null; + funcName: string | null; +} + +interface DataflowParam { + funcName: string; + paramName: string; + paramIndex: number; + line: number; +} + +interface DataflowReturnEntry { + funcName: string; + expression: string; + referencedNames: string[]; + line: number; +} + +interface DataflowAssignment { + varName: string; + callerFunc: string; + sourceCallName: string; + expression: string; + line: number; +} -function currentScope(scopeStack) { - return scopeStack.length > 0 ? 
scopeStack[scopeStack.length - 1] : null; +interface DataflowArgFlow { + callerFunc: string; + calleeName: string; + argIndex: number; + argName: string; + binding: Binding; + confidence: number; + expression: string; + line: number; } -function findBinding(name, scopeStack) { +interface DataflowMutation { + funcName: string; + receiverName: string; + binding: Binding; + mutatingExpr: string; + line: number; +} + +interface DataflowResultInternal { + parameters: DataflowParam[]; + returns: DataflowReturnEntry[]; + assignments: DataflowAssignment[]; + argFlows: DataflowArgFlow[]; + mutations: DataflowMutation[]; +} + +function currentScope(scopeStack: ScopeEntry[]): ScopeEntry | undefined { + return scopeStack[scopeStack.length - 1]; +} + +function findBinding(name: string, scopeStack: ScopeEntry[]): Binding | null { for (let i = scopeStack.length - 1; i >= 0; i--) { - const scope = scopeStack[i]; + const scope = scopeStack[i]!; if (scope.params.has(name)) - return { type: 'param', index: scope.params.get(name), funcName: scope.funcName }; + return { type: 'param', index: scope.params.get(name) as number, funcName: scope.funcName }; if (scope.locals.has(name)) - return { type: 'local', source: scope.locals.get(name), funcName: scope.funcName }; + return { + type: 'local', + source: scope.locals.get(name) as { type: string; callee?: string }, + funcName: scope.funcName, + }; } return null; } -function bindingConfidence(binding) { +function bindingConfidence(binding: Binding | null): number { if (!binding) return 0.5; if (binding.type === 'param') return 1.0; if (binding.type === 'local') { @@ -49,29 +106,33 @@ function bindingConfidence(binding) { return 0.5; } -// ── Node helpers ──────────────────────────────────────────────────────── - -function unwrapAwait(node, rules) { +function unwrapAwait(node: TreeSitterNode, rules: AnyRules): TreeSitterNode { if (rules.awaitNode && node.type === rules.awaitNode) { return node.namedChildren[0] || node; } return node; } 
-function isCall(node, isCallNode) { - return node && isCallNode(node.type); +function isCall(node: TreeSitterNode | null, isCallNode: (t: string) => boolean): boolean { + return node != null && isCallNode(node.type); } -// ── Node handlers ─────────────────────────────────────────────────────── - -function handleVarDeclarator(node, rules, scopeStack, assignments, isCallNode) { +function handleVarDeclarator( + node: TreeSitterNode, + rules: AnyRules, + scopeStack: ScopeEntry[], + assignments: DataflowAssignment[], + isCallNode: (t: string) => boolean, +): void { let nameNode = node.childForFieldName(rules.varNameField); - let valueNode = rules.varValueField ? node.childForFieldName(rules.varValueField) : null; + let valueNode: TreeSitterNode | null = rules.varValueField + ? node.childForFieldName(rules.varValueField) + : null; if (!valueNode && rules.equalsClauseType) { for (const child of node.namedChildren) { if (child.type === rules.equalsClauseType) { - valueNode = child.childForFieldName('value') || child.namedChildren[0]; + valueNode = child.childForFieldName('value') || child.namedChildren[0] || null; break; } } @@ -87,8 +148,10 @@ function handleVarDeclarator(node, rules, scopeStack, assignments, isCallNode) { } if (rules.expressionListType) { - if (nameNode?.type === rules.expressionListType) nameNode = nameNode.namedChildren[0]; - if (valueNode?.type === rules.expressionListType) valueNode = valueNode.namedChildren[0]; + if (nameNode && nameNode.type === rules.expressionListType) + nameNode = nameNode.namedChildren[0] ?? null; + if (valueNode && valueNode.type === rules.expressionListType) + valueNode = valueNode.namedChildren[0] ?? 
null; } const scope = currentScope(scopeStack); @@ -133,7 +196,14 @@ function handleVarDeclarator(node, rules, scopeStack, assignments, isCallNode) { } } -function handleAssignment(node, rules, scopeStack, assignments, mutations, isCallNode) { +function handleAssignment( + node: TreeSitterNode, + rules: AnyRules, + scopeStack: ScopeEntry[], + assignments: DataflowAssignment[], + mutations: DataflowMutation[], + isCallNode: (t: string) => boolean, +): void { const left = node.childForFieldName(rules.assignLeftField); const right = node.childForFieldName(rules.assignRightField); const scope = currentScope(scopeStack); @@ -174,7 +244,12 @@ function handleAssignment(node, rules, scopeStack, assignments, mutations, isCal } } -function handleCallExpr(node, rules, scopeStack, argFlows) { +function handleCallExpr( + node: TreeSitterNode, + rules: AnyRules, + scopeStack: ScopeEntry[], + argFlows: DataflowArgFlow[], +): void { const callee = resolveCalleeName(node, rules); const argsNode = node.childForFieldName(rules.callArgsField); const scope = currentScope(scopeStack); @@ -218,13 +293,19 @@ function handleCallExpr(node, rules, scopeStack, argFlows) { } } -function handleExprStmtMutation(node, rules, scopeStack, mutations, isCallNode) { +function handleExprStmtMutation( + node: TreeSitterNode, + rules: AnyRules, + scopeStack: ScopeEntry[], + mutations: DataflowMutation[], + isCallNode: (t: string) => boolean, +): void { if (rules.mutatingMethods.size === 0) return; const expr = node.namedChildren[0]; if (!expr || !isCall(expr, isCallNode)) return; - let methodName = null; - let receiver = null; + let methodName: string | null = null; + let receiver: string | null = null; const fn = expr.childForFieldName(rules.callFunctionField); if (fn && fn.type === rules.memberNode) { @@ -259,15 +340,18 @@ function handleExprStmtMutation(node, rules, scopeStack, mutations, isCallNode) } } -// ── Return statement handler ──────────────────────────────────────────── - -function 
handleReturn(node, rules, scopeStack, returns) { - if (node.parent?.type === rules.returnNode) return; // keyword token, not statement +function handleReturn( + node: TreeSitterNode, + rules: AnyRules, + scopeStack: ScopeEntry[], + returns: DataflowReturnEntry[], +): void { + if (node.parent?.type === rules.returnNode) return; const scope = currentScope(scopeStack); if (scope?.funcName) { const expr = node.namedChildren[0]; - const referencedNames = []; + const referencedNames: string[] = []; if (expr) collectIdentifiers(expr, referencedNames, rules); returns.push({ funcName: scope.funcName, @@ -278,33 +362,31 @@ function handleReturn(node, rules, scopeStack, returns) { } } -// ── Visitor factory ───────────────────────────────────────────────────── - -/** - * Create a dataflow visitor for use with walkWithVisitors. - * - * @param {object} rules - DATAFLOW_RULES for the language - * @returns {Visitor} - */ -export function createDataflowVisitor(rules) { - const isCallNode = rules.callNodes ? (t) => rules.callNodes.has(t) : (t) => t === rules.callNode; +export function createDataflowVisitor(rules: AnyRules): Visitor { + const isCallNode: (t: string) => boolean = rules.callNodes + ? 
(t: string) => rules.callNodes.has(t) + : (t: string) => t === rules.callNode; - const parameters = []; - const returns = []; - const assignments = []; - const argFlows = []; - const mutations = []; - const scopeStack = []; + const parameters: DataflowParam[] = []; + const returns: DataflowReturnEntry[] = []; + const assignments: DataflowAssignment[] = []; + const argFlows: DataflowArgFlow[] = []; + const mutations: DataflowMutation[] = []; + const scopeStack: ScopeEntry[] = []; return { name: 'dataflow', functionNodeTypes: rules.functionNodes, - enterFunction(funcNode, _funcName, _context) { + enterFunction( + funcNode: TreeSitterNode, + _funcName: string | null, + _context: VisitorContext, + ): void { const name = functionName(funcNode, rules); const paramsNode = funcNode.childForFieldName(rules.paramListField); const paramList = extractParams(paramsNode, rules); - const paramMap = new Map(); + const paramMap = new Map(); for (const p of paramList) { paramMap.set(p.name, p.index); if (name) { @@ -319,11 +401,15 @@ export function createDataflowVisitor(rules) { scopeStack.push({ funcName: name, funcNode, params: paramMap, locals: new Map() }); }, - exitFunction(_funcNode, _funcName, _context) { + exitFunction( + _funcNode: TreeSitterNode, + _funcName: string | null, + _context: VisitorContext, + ): void { scopeStack.pop(); }, - enterNode(node, _context) { + enterNode(node: TreeSitterNode, _context: VisitorContext): EnterNodeResult | undefined { const t = node.type; if (rules.functionNodes.has(t)) return; @@ -357,7 +443,7 @@ export function createDataflowVisitor(rules) { } }, - finish() { + finish(): DataflowResultInternal { return { parameters, returns, assignments, argFlows, mutations }; }, }; diff --git a/src/types.ts b/src/types.ts index 4505956e..df2561cf 100644 --- a/src/types.ts +++ b/src/types.ts @@ -390,6 +390,16 @@ export interface HalsteadMetrics { bugs: number; } +/** Halstead derived metrics including raw counts. 
*/ +export interface HalsteadDerivedMetrics extends HalsteadMetrics { + n1: number; + n2: number; + bigN1: number; + bigN2: number; + vocabulary: number; + length: number; +} + /** Lines-of-code metrics. */ export interface LOCMetrics { loc: number; @@ -635,30 +645,111 @@ export type ASTTypeMap = Map; /** Complexity rules for a language. */ export interface ComplexityRules { branchNodes: Set; + caseNodes: Set; + logicalOperators: Set; + logicalNodeType: string | null; + optionalChainType: string | null; nestingNodes: Set; functionNodes: Set; + ifNodeType: string | null; + elseNodeType: string | null; + elifNodeType: string | null; + elseViaAlternative: boolean; + switchLikeNodes: Set; } /** Halstead rules for a language. */ export interface HalsteadRules { - operators: Set; - operands: Set; -} - -/** CFG rules for a language. */ -export interface CfgRules { - controlFlowNodes: Set; + operatorLeafTypes: Set; + operandLeafTypes: Set; + compoundOperators: Set; + skipTypes: Set; +} + +/** CFG rules for a language (merged result of CFG_DEFAULTS + overrides). */ +export interface CfgRulesConfig { + ifNode: string | null; + ifNodes: Set | null; + elifNode: string | null; + elseClause: string | null; + elseViaAlternative: boolean; + ifConsequentField: string | null; + forNodes: Set; + whileNode: string | null; + whileNodes: Set | null; + doNode: string | null; + infiniteLoopNode: string | null; + unlessNode: string | null; + untilNode: string | null; + switchNode: string | null; + switchNodes: Set | null; + caseNode: string | null; + caseNodes: Set | null; + defaultNode: string | null; + tryNode: string | null; + catchNode: string | null; + finallyNode: string | null; + returnNode: string | null; + throwNode: string | null; + breakNode: string | null; + continueNode: string | null; + blockNode: string | null; + blockNodes: Set | null; + labeledNode: string | null; functionNodes: Set; } -/** Dataflow rules for a language. 
*/ -export interface DataflowRules { - variableDeclarators: Set; - parameterNodes: Set; - callNodes: Set; - memberNodes: Set; - returnNodes: Set; - awaitNodes: Set; +/** Dataflow rules for a language (merged result of DATAFLOW_DEFAULTS + overrides). */ +export interface DataflowRulesConfig { + functionNodes: Set; + nameField: string; + varAssignedFnParent: string | null; + assignmentFnParent: string | null; + pairFnParent: string | null; + paramListField: string; + paramIdentifier: string; + paramWrapperTypes: Set; + defaultParamType: string | null; + restParamType: string | null; + objectDestructType: string | null; + arrayDestructType: string | null; + shorthandPropPattern: string | null; + pairPatternType: string | null; + extractParamName: ((node: TreeSitterNode) => string[] | null) | null; + returnNode: string | null; + varDeclaratorNode: string | null; + varDeclaratorNodes: Set | null; + varNameField: string; + varValueField: string; + assignmentNode: string | null; + assignLeftField: string; + assignRightField: string; + callNode: string | null; + callNodes: Set | null; + callFunctionField: string; + callArgsField: string; + spreadType: string | null; + memberNode: string | null; + memberObjectField: string; + memberPropertyField: string; + optionalChainNode: string | null; + awaitNode: string | null; + mutatingMethods: Set; + expressionStmtNode: string; + callObjectField: string | null; + expressionListType: string | null; + equalsClauseType: string | null; + argumentWrapperType: string | null; + extraIdentifierTypes: Set | null; +} + +/** Language rule module: exports from each language rule file. */ +export interface LanguageRuleModule { + complexity: ComplexityRules; + halstead: HalsteadRules; + cfg: CfgRulesConfig; + dataflow: DataflowRulesConfig; + astTypes: Record | null; } /** A basic block in a control flow graph. 
*/ From fa3ec4f84c2a010e734f3f4e822894d51f02d4fc Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Mon, 23 Mar 2026 20:12:27 -0600 Subject: [PATCH 02/26] feat(types): migrate features/ to TypeScript (Phase 6b) Migrate all 20 remaining features/ files from JavaScript to TypeScript: - shared/find-nodes, ast, audit, batch, boundaries, branch-compare - cfg, check, cochange, communities, complexity, dataflow - flow, graph-enrichment, manifesto, owners, sequence - snapshot, structure, triage Add inline type definitions for feature-specific interfaces (options, results, intermediate data structures). All exports typed with explicit parameter and return types. Update ROADMAP progress to 114/283 (~40%). Impact: 744 functions changed, 71 affected --- docs/roadmap/ROADMAP.md | 7 +- src/features/{ast.js => ast.ts} | 178 +++++--- src/features/{audit.js => audit.ts} | 229 +++++++--- src/features/batch.js | 156 ------- src/features/batch.ts | 182 ++++++++ src/features/{boundaries.js => boundaries.ts} | 217 +++++----- .../{branch-compare.js => branch-compare.ts} | 234 ++++++---- src/features/{cfg.js => cfg.ts} | 234 ++++++---- src/features/{check.js => check.ts} | 213 ++++++--- src/features/{cochange.js => cochange.ts} | 214 ++++----- .../{communities.js => communities.ts} | 133 +++--- src/features/{complexity.js => complexity.ts} | 408 +++++++++++------- src/features/{dataflow.js => dataflow.ts} | 379 ++++++++++------ src/features/{flow.js => flow.ts} | 108 +++-- ...raph-enrichment.js => graph-enrichment.ts} | 198 +++++++-- src/features/{manifesto.js => manifesto.ts} | 215 +++++---- src/features/{owners.js => owners.ts} | 161 +++---- src/features/{sequence.js => sequence.ts} | 182 +++++--- .../shared/{find-nodes.js => find-nodes.ts} | 23 +- src/features/{snapshot.js => snapshot.ts} | 75 ++-- src/features/{structure.js => structure.ts} | 361 ++++++++++++---- src/features/{triage.js => triage.ts} | 122 ++++-- 22 files changed, 2641 
insertions(+), 1588 deletions(-) rename src/features/{ast.js => ast.ts} (67%) rename src/features/{audit.js => audit.ts} (60%) delete mode 100644 src/features/batch.js create mode 100644 src/features/batch.ts rename src/features/{boundaries.js => boundaries.ts} (63%) rename src/features/{branch-compare.js => branch-compare.ts} (70%) rename src/features/{cfg.js => cfg.ts} (68%) rename src/features/{check.js => check.ts} (69%) rename src/features/{cochange.js => cochange.ts} (72%) rename src/features/{communities.js => communities.ts} (65%) rename src/features/{complexity.js => complexity.ts} (71%) rename src/features/{dataflow.js => dataflow.ts} (64%) rename src/features/{flow.js => flow.ts} (73%) rename src/features/{graph-enrichment.js => graph-enrichment.ts} (68%) rename src/features/{manifesto.js => manifesto.ts} (68%) rename src/features/{owners.js => owners.ts} (72%) rename src/features/{sequence.js => sequence.ts} (64%) rename src/features/shared/{find-nodes.js => find-nodes.ts} (51%) rename src/features/{snapshot.js => snapshot.ts} (63%) rename src/features/{structure.js => structure.ts} (73%) rename src/features/{triage.js => triage.ts} (58%) diff --git a/docs/roadmap/ROADMAP.md b/docs/roadmap/ROADMAP.md index b20feff0..b95ab37a 100644 --- a/docs/roadmap/ROADMAP.md +++ b/docs/roadmap/ROADMAP.md @@ -1080,7 +1080,7 @@ npm workspaces (`package.json` `workspaces`), `pnpm-workspace.yaml`, and `lerna. ## Phase 5 -- TypeScript Migration -> **Status:** In Progress — 94 of 283 source files migrated (~33%), 189 `.js` files remaining +> **Status:** In Progress — 114 of 283 source files migrated (~40%), 169 `.js` files remaining **Goal:** Migrate the codebase from plain JavaScript to TypeScript, leveraging the clean module boundaries established in Phase 3. Incremental module-by-module migration starting from leaf modules inward. @@ -1151,16 +1151,15 @@ Migrate modules that implement domain logic and Phase 3 interfaces. 
Some migrate Migrate top-level orchestration, features, and entry points. Some migrated via [#555](https://github.com/optave/codegraph/pull/555), 141 files remaining. -**Migrated:** `domain/graph/builder.ts` + `context.ts` + `helpers.ts` + `pipeline.ts`, `domain/graph/watcher.ts`, `domain/search/{generator,index,models}.ts`, `mcp/{index,middleware,server,tool-registry}.ts`, `features/export.ts`, `index.ts`, `ast-analysis/` (all 18 files — engine, metrics, shared, visitor, visitor-utils, 9 language rules, 4 visitors) +**Migrated:** `domain/graph/builder.ts` + `context.ts` + `helpers.ts` + `pipeline.ts`, `domain/graph/watcher.ts`, `domain/search/{generator,index,models}.ts`, `mcp/{index,middleware,server,tool-registry}.ts`, `features/export.ts`, `index.ts`, `ast-analysis/` (all 18 files), `features/` (all 20 files — ast, audit, batch, boundaries, branch-compare, cfg, check, cochange, communities, complexity, dataflow, flow, graph-enrichment, manifesto, owners, sequence, snapshot, structure, triage, shared/find-nodes) -**Remaining (141):** +**Remaining (121):** | Module | Files | Notes | |--------|-------|-------| | `cli.js` + `cli/` | 55 | Commander entry point, 43 command handlers (`commands/`), barrel, shared CLI utilities | | `mcp/tools/` | 36 | Individual MCP tool handlers + barrel | | `presentation/` | 28 | Presentation formatters (14 files), `queries-cli/` (7 files), sequence-renderer, viewer, export, etc. 
| -| `features/` | 21 | audit, batch, boundaries, cfg, check, cochange, communities, complexity, dataflow, flow, graph-enrichment, manifesto, owners, sequence, snapshot, structure, triage, ast, branch-compare, `shared/find-nodes` | | `index.js` | 1 | Public API exports (stale — `.ts` exists) | **Stale `.js` counterparts to delete (13 files):** `domain/graph/builder.js`, `domain/graph/builder/{context,helpers,pipeline}.js`, `domain/graph/watcher.js`, `domain/search/{generator,index,models}.js`, `features/export.js`, `mcp/{index,middleware,server,tool-registry}.js` — these have `.ts` counterparts already diff --git a/src/features/ast.js b/src/features/ast.ts similarity index 67% rename from src/features/ast.js rename to src/features/ast.ts index ad9a6270..b2abf326 100644 --- a/src/features/ast.js +++ b/src/features/ast.ts @@ -1,11 +1,3 @@ -/** - * Stored queryable AST nodes — build-time extraction + query functions. - * - * Persists selected AST nodes (calls, new, string, regex, throw, await) in the - * `ast_nodes` table during build. Queryable via CLI (`codegraph ast`), MCP - * (`ast_query`), and programmatic API. 
- */ - import path from 'node:path'; import { AST_TYPE_MAPS } from '../ast-analysis/rules/index.js'; import { buildExtensionSet } from '../ast-analysis/shared.js'; @@ -16,12 +8,13 @@ import { buildFileConditionSQL } from '../db/query-builder.js'; import { debug } from '../infrastructure/logger.js'; import { outputResult } from '../infrastructure/result-formatter.js'; import { paginateResult } from '../shared/paginate.js'; +import type { ASTNodeKind, BetterSqlite3Database, Definition, TreeSitterNode } from '../types.js'; // ─── Constants ──────────────────────────────────────────────────────── -export const AST_NODE_KINDS = ['call', 'new', 'string', 'regex', 'throw', 'await']; +export const AST_NODE_KINDS: ASTNodeKind[] = ['call', 'new', 'string', 'regex', 'throw', 'await']; -const KIND_ICONS = { +const KIND_ICONS: Record = { call: '\u0192', // ƒ new: '\u2295', // ⊕ string: '"', @@ -30,24 +23,35 @@ const KIND_ICONS = { await: '\u22B3', // ⊳ }; -/** tree-sitter node types that map to our AST node kinds — imported from rules. */ const JS_TS_AST_TYPES = AST_TYPE_MAPS.get('javascript'); -/** Extensions that support full AST walk (new/throw/await/string/regex). */ const WALK_EXTENSIONS = buildExtensionSet(AST_TYPE_MAPS); // ─── Helpers ────────────────────────────────────────────────────────── -// Node extraction helpers (extractNewName, extractName, etc.) moved to -// ast-analysis/visitors/ast-store-visitor.js as part of the visitor framework. - -/** - * Find the narrowest enclosing definition for a given line. 
- */ -function findParentDef(defs, line) { - let best = null; + +interface AstRow { + file: string; + line: number; + kind: string; + name: string; + text: string | null; + receiver: string | null; + parentNodeId: number | null; +} + +interface FileSymbols { + definitions: Definition[]; + calls?: Array<{ line: number; name: string; dynamic?: boolean; receiver?: string }>; + astNodes?: Array<{ line: number; kind: string; name: string; text?: string; receiver?: string }>; + _tree?: { rootNode: TreeSitterNode }; + _langId?: string; +} + +function findParentDef(defs: Definition[], line: number): Definition | null { + let best: Definition | null = null; for (const def of defs) { if (def.line <= line && (def.endLine == null || def.endLine >= line)) { - if (!best || def.endLine - def.line < best.endLine - best.line) { + if (!best || (def.endLine ?? 0) - def.line < (best.endLine ?? 0) - best.line) { best = def; } } @@ -57,17 +61,13 @@ function findParentDef(defs, line) { // ─── Build ──────────────────────────────────────────────────────────── -/** - * Extract AST nodes from parsed files and persist to the ast_nodes table. 
- * - * @param {object} db - open better-sqlite3 database (read-write) - * @param {Map} fileSymbols - Map - * @param {string} rootDir - absolute project root path - * @param {object} [_engineOpts] - engine options (unused) - */ -export async function buildAstNodes(db, fileSymbols, _rootDir, _engineOpts) { - // Ensure table exists (migration may not have run on older DBs) - let insertStmt; +export async function buildAstNodes( + db: BetterSqlite3Database, + fileSymbols: Map, + _rootDir: string, + _engineOpts?: unknown, +): Promise { + let insertStmt: ReturnType; try { insertStmt = db.prepare( 'INSERT INTO ast_nodes (file, line, kind, name, text, receiver, parent_node_id) VALUES (?, ?, ?, ?, ?, ?, ?)', @@ -77,28 +77,26 @@ export async function buildAstNodes(db, fileSymbols, _rootDir, _engineOpts) { return; } - const tx = db.transaction((rows) => { + const tx = db.transaction((rows: AstRow[]) => { for (const r of rows) { insertStmt.run(r.file, r.line, r.kind, r.name, r.text, r.receiver, r.parentNodeId); } }); - const allRows = []; + const allRows: AstRow[] = []; for (const [relPath, symbols] of fileSymbols) { const defs = symbols.definitions || []; - // Pre-load all node IDs for this file into a map (read-only, fast) - const nodeIdMap = new Map(); + const nodeIdMap = new Map(); for (const row of bulkNodeIdsByFile(db, relPath)) { nodeIdMap.set(`${row.name}|${row.kind}|${row.line}`, row.id); } - // 1. Call nodes from symbols.calls (all languages) if (symbols.calls) { for (const call of symbols.calls) { const parentDef = findParentDef(defs, call.line); - let parentNodeId = null; + let parentNodeId: number | null = null; if (parentDef) { parentNodeId = nodeIdMap.get(`${parentDef.name}|${parentDef.kind}|${parentDef.line}`) || null; @@ -115,12 +113,10 @@ export async function buildAstNodes(db, fileSymbols, _rootDir, _engineOpts) { } } - // 2. 
Non-call AST nodes (new, throw, await, string, regex) if (symbols.astNodes?.length) { - // Native path: use pre-extracted AST nodes from Rust (all languages) for (const n of symbols.astNodes) { const parentDef = findParentDef(defs, n.line); - let parentNodeId = null; + let parentNodeId: number | null = null; if (parentDef) { parentNodeId = nodeIdMap.get(`${parentDef.name}|${parentDef.kind}|${parentDef.line}`) || null; @@ -136,10 +132,9 @@ export async function buildAstNodes(db, fileSymbols, _rootDir, _engineOpts) { }); } } else { - // WASM fallback: walk the tree-sitter AST (JS/TS/TSX only) const ext = path.extname(relPath).toLowerCase(); if (WALK_EXTENSIONS.has(ext) && symbols._tree) { - const astRows = []; + const astRows: AstRow[] = []; walkAst(symbols._tree.rootNode, defs, relPath, astRows, nodeIdMap); allRows.push(...astRows); } @@ -153,37 +148,78 @@ export async function buildAstNodes(db, fileSymbols, _rootDir, _engineOpts) { debug(`AST extraction: ${allRows.length} nodes stored`); } -/** - * Walk a tree-sitter AST and collect new/throw/await/string/regex nodes. - * Delegates to the ast-store visitor via the unified walker. - */ -function walkAst(rootNode, defs, relPath, rows, nodeIdMap) { - const visitor = createAstStoreVisitor(JS_TS_AST_TYPES, defs, relPath, nodeIdMap); +function walkAst( + rootNode: TreeSitterNode, + defs: Definition[], + relPath: string, + rows: AstRow[], + nodeIdMap: Map, +): void { + const visitor = createAstStoreVisitor(JS_TS_AST_TYPES!, defs, relPath, nodeIdMap); const results = walkWithVisitors(rootNode, [visitor], 'javascript'); - const collected = results['ast-store'] || []; + const collected = (results['ast-store'] || []) as AstRow[]; rows.push(...collected); } // ─── Query ──────────────────────────────────────────────────────────── -/** - * Query AST nodes — data-returning function. 
- * - * @param {string} [pattern] - GLOB pattern for node name (auto-wrapped in *..*) - * @param {string} [customDbPath] - path to graph.db - * @param {object} [opts] - * @returns {{ pattern, kind, count, results, _pagination? }} - */ -export function astQueryData(pattern, customDbPath, opts = {}) { +interface AstQueryRow { + kind: string; + name: string; + file: string; + line: number; + text: string | null; + receiver: string | null; + parent_node_id: number | null; + parent_name: string | null; + parent_kind: string | null; + parent_file: string | null; +} + +interface AstQueryResult { + kind: string; + name: string; + file: string; + line: number; + text: string | null; + receiver: string | null; + parent: { name: string | null; kind: string | null; file: string | null } | null; +} + +interface AstQueryOpts { + kind?: string; + file?: string | string[]; + noTests?: boolean; + limit?: number; + offset?: number; + json?: boolean; + ndjson?: boolean; +} + +export function astQueryData( + pattern: string | undefined, + customDbPath: string | undefined, + opts: AstQueryOpts = {}, +): { + pattern: string; + kind: string | null; + count: number; + results: AstQueryResult[]; + _pagination?: { + hasMore: boolean; + total: number; + offset: number; + returned: number; + limit: number; + }; +} { const db = openReadonlyOrFail(customDbPath); const { kind, file, noTests, limit, offset } = opts; let where = 'WHERE 1=1'; - const params = []; + const params: unknown[] = []; - // Pattern matching if (pattern && pattern !== '*') { - // If user already uses wildcards, use as-is; otherwise wrap in *..* for substring const globPattern = pattern.includes('*') ? pattern : `*${pattern}*`; where += ' AND a.name GLOB ?'; params.push(globPattern); @@ -195,7 +231,7 @@ export function astQueryData(pattern, customDbPath, opts = {}) { } { - const fc = buildFileConditionSQL(file, 'a.file'); + const fc = buildFileConditionSQL(file ?? 
[], 'a.file'); where += fc.sql; params.push(...fc.params); } @@ -217,14 +253,14 @@ export function astQueryData(pattern, customDbPath, opts = {}) { ORDER BY a.file, a.line `; - let rows; + let rows: AstQueryRow[]; try { - rows = db.prepare(sql).all(...params); + rows = db.prepare(sql).all(...params) as AstQueryRow[]; } finally { db.close(); } - const results = rows.map((r) => ({ + const results: AstQueryResult[] = rows.map((r) => ({ kind: r.kind, name: r.name, file: r.file, @@ -246,15 +282,15 @@ export function astQueryData(pattern, customDbPath, opts = {}) { return paginateResult(data, 'results', { limit, offset }); } -/** - * Query AST nodes — display function (human/json/ndjson output). - */ -export function astQuery(pattern, customDbPath, opts = {}) { +export function astQuery( + pattern: string | undefined, + customDbPath: string | undefined, + opts: AstQueryOpts = {}, +): void { const data = astQueryData(pattern, customDbPath, opts); if (outputResult(data, 'results', opts)) return; - // Human-readable output if (data.results.length === 0) { console.log(`No AST nodes found${pattern ? ` matching "${pattern}"` : ''}.`); return; diff --git a/src/features/audit.js b/src/features/audit.ts similarity index 60% rename from src/features/audit.js rename to src/features/audit.ts index b9bddb2b..cfce21ff 100644 --- a/src/features/audit.js +++ b/src/features/audit.ts @@ -1,11 +1,3 @@ -/** - * audit.js — Composite report: explain + impact + health metrics per function. - * - * Combines explainData (structure, callers, callees, basic complexity), - * full function_complexity health metrics, BFS impact analysis, and - * manifesto threshold breach detection into a single call. 
- */ - import path from 'node:path'; import { openReadonlyOrFail } from '../db/index.js'; import { normalizeFileFilter } from '../db/query-builder.js'; @@ -13,25 +5,35 @@ import { bfsTransitiveCallers } from '../domain/analysis/impact.js'; import { explainData } from '../domain/queries.js'; import { loadConfig } from '../infrastructure/config.js'; import { isTestFile } from '../infrastructure/test-filter.js'; +import type { BetterSqlite3Database, CodegraphConfig } from '../types.js'; import { RULE_DEFS } from './manifesto.js'; // ─── Threshold resolution ─────────────────────────────────────────── -const FUNCTION_RULES = RULE_DEFS.filter((d) => d.level === 'function'); +interface ThresholdEntry { + metric: string; + warn: number | null; + fail: number | null; +} + +const FUNCTION_RULES = RULE_DEFS.filter((d: { level: string }) => d.level === 'function'); -function resolveThresholds(customDbPath, config) { +function resolveThresholds( + customDbPath: string | undefined, + config: unknown, +): Record { try { const cfg = config || (() => { - const dbDir = path.dirname(customDbPath); + const dbDir = path.dirname(customDbPath!); const repoRoot = path.resolve(dbDir, '..'); return loadConfig(repoRoot); })(); - const userRules = cfg.manifesto || {}; - const resolved = {}; + const userRules = (cfg as Record)['manifesto'] || {}; + const resolved: Record = {}; for (const def of FUNCTION_RULES) { - const user = userRules[def.name]; + const user = (userRules as Record)[def.name]; resolved[def.name] = { metric: def.metric, warn: user?.warn !== undefined ? 
user.warn : def.defaults.warn, @@ -41,7 +43,7 @@ function resolveThresholds(customDbPath, config) { return resolved; } catch { // Fall back to defaults if config loading fails - const resolved = {}; + const resolved: Record = {}; for (const def of FUNCTION_RULES) { resolved[def.name] = { metric: def.metric, @@ -54,18 +56,28 @@ function resolveThresholds(customDbPath, config) { } // Column name in DB → threshold rule name mapping -const METRIC_TO_RULE = { +const METRIC_TO_RULE: Record = { cognitive: 'cognitive', cyclomatic: 'cyclomatic', max_nesting: 'maxNesting', }; -function checkBreaches(row, thresholds) { - const breaches = []; +interface ThresholdBreach { + metric: string; + value: number; + threshold: number; + level: 'warn' | 'fail'; +} + +function checkBreaches( + row: Record, + thresholds: Record, +): ThresholdBreach[] { + const breaches: ThresholdBreach[] = []; for (const [col, ruleName] of Object.entries(METRIC_TO_RULE)) { const t = thresholds[ruleName]; if (!t) continue; - const value = row[col]; + const value = row[col] as number | null | undefined; if (value == null) continue; if (t.fail != null && value >= t.fail) { breaches.push({ metric: ruleName, value, threshold: t.fail, level: 'fail' }); @@ -78,11 +90,19 @@ function checkBreaches(row, thresholds) { // ─── Phase 4.4 fields (graceful null fallback) ───────────────────── -function readPhase44(db, nodeId) { +interface Phase44Fields { + riskScore: number | null; + complexityNotes: string | null; + sideEffects: string | null; +} + +function readPhase44(db: BetterSqlite3Database, nodeId: number): Phase44Fields { try { const row = db .prepare('SELECT risk_score, complexity_notes, side_effects FROM nodes WHERE id = ?') - .get(nodeId); + .get(nodeId) as + | { risk_score?: number; complexity_notes?: string; side_effects?: string } + | undefined; if (row) { return { riskScore: row.risk_score ?? 
null, @@ -98,22 +118,59 @@ function readPhase44(db, nodeId) { // ─── auditData ────────────────────────────────────────────────────── -export function auditData(target, customDbPath, opts = {}) { +interface SymbolRef { + name: string; + kind: string; + file: string; + line: number; +} + +interface HealthMetrics { + cognitive: number | null; + cyclomatic: number | null; + maxNesting: number | null; + maintainabilityIndex: number | null; + halstead: { volume: number; difficulty: number; effort: number; bugs: number }; + loc: number; + sloc: number; + commentLines: number; + thresholdBreaches: ThresholdBreach[]; +} + +interface AuditDataOpts { + noTests?: boolean; + config?: CodegraphConfig; + depth?: number; + file?: string; + kind?: string; +} + +export function auditData( + target: string, + customDbPath?: string, + opts: AuditDataOpts = {}, +): { target: string; kind: string; functions: unknown[] } { const noTests = opts.noTests || false; const config = opts.config || loadConfig(); - const maxDepth = opts.depth || config.analysis?.auditDepth || 3; + const maxDepth = + opts.depth || + (config as unknown as { analysis?: { auditDepth?: number } }).analysis?.auditDepth || + 3; const fileFilters = normalizeFileFilter(opts.file); const kind = opts.kind; // 1. 
Get structure via explainData - const explained = explainData(target, customDbPath, { noTests, depth: 0 }); + const explained = explainData(target, customDbPath!, { noTests, depth: 0 }); // Apply --file and --kind filters for function targets - let results = explained.results; + // biome-ignore lint/suspicious/noExplicitAny: explainData returns a union type that varies by kind + let results: any[] = explained.results; if (explained.kind === 'function') { if (fileFilters.length > 0) - results = results.filter((r) => fileFilters.some((f) => r.file.includes(f))); - if (kind) results = results.filter((r) => r.kind === kind); + results = results.filter((r: { file: string }) => + fileFilters.some((f: string) => r.file.includes(f)), + ); + if (kind) results = results.filter((r: { kind: string }) => r.kind === kind); } if (results.length === 0) { @@ -124,14 +181,17 @@ export function auditData(target, customDbPath, opts = {}) { const db = openReadonlyOrFail(customDbPath); const thresholds = resolveThresholds(customDbPath, opts.config); - let functions; + let functions: unknown[]; try { if (explained.kind === 'file') { // File target: explainData returns file-level info with publicApi + internal // We need to enrich each symbol functions = []; for (const fileResult of results) { - const allSymbols = [...(fileResult.publicApi || []), ...(fileResult.internal || [])]; + const allSymbols = [ + ...(fileResult.publicApi || []), + ...(fileResult.internal || []), + ] as FileSymbol[]; if (kind) { const filtered = allSymbols.filter((s) => s.kind === kind); for (const sym of filtered) { @@ -145,7 +205,9 @@ export function auditData(target, customDbPath, opts = {}) { } } else { // Function target: explainData returns per-function results - functions = results.map((r) => enrichFunction(db, r, noTests, maxDepth, thresholds)); + functions = results.map((r: ExplainResult) => + enrichFunction(db, r, noTests, maxDepth, thresholds), + ); } } finally { db.close(); @@ -156,10 +218,31 @@ export 
function auditData(target, customDbPath, opts = {}) { // ─── Enrich a function result from explainData ────────────────────── -function enrichFunction(db, r, noTests, maxDepth, thresholds) { +interface ExplainResult { + name: string; + kind: string; + file: string; + line: number; + endLine?: number | null; + role?: string | null; + lineCount?: number | null; + summary?: string | null; + signature?: string | null; + callees?: SymbolRef[]; + callers?: SymbolRef[]; + relatedTests?: { file: string }[]; +} + +function enrichFunction( + db: BetterSqlite3Database, + r: ExplainResult, + noTests: boolean, + maxDepth: number, + thresholds: Record, +): unknown { const nodeRow = db .prepare('SELECT id FROM nodes WHERE name = ? AND file = ? AND line = ?') - .get(r.name, r.file, r.line); + .get(r.name, r.file, r.line) as { id: number } | undefined; const nodeId = nodeRow?.id; const health = nodeId ? buildHealth(db, nodeId, thresholds) : defaultHealth(); @@ -191,37 +274,55 @@ function enrichFunction(db, r, noTests, maxDepth, thresholds) { // ─── Enrich a symbol from file-level explainData ──────────────────── -function enrichSymbol(db, sym, file, noTests, maxDepth, thresholds) { +interface FileSymbol { + name: string; + kind: string; + line: number; + role?: string | null; + summary?: string | null; + signature?: string | null; +} + +function enrichSymbol( + db: BetterSqlite3Database, + sym: FileSymbol, + file: string, + noTests: boolean, + maxDepth: number, + thresholds: Record, +): unknown { const nodeRow = db .prepare('SELECT id, end_line FROM nodes WHERE name = ? AND file = ? AND line = ?') - .get(sym.name, file, sym.line); + .get(sym.name, file, sym.line) as { id: number; end_line: number | null } | undefined; const nodeId = nodeRow?.id; const endLine = nodeRow?.end_line || null; const lineCount = endLine ? 
endLine - sym.line + 1 : null; // Get callers/callees for this symbol - let callees = []; - let callers = []; - let relatedTests = []; + let callees: SymbolRef[] = []; + let callers: SymbolRef[] = []; + let relatedTests: { file: string }[] = []; if (nodeId) { - callees = db - .prepare( - `SELECT n.name, n.kind, n.file, n.line + callees = ( + db + .prepare( + `SELECT n.name, n.kind, n.file, n.line FROM edges e JOIN nodes n ON e.target_id = n.id WHERE e.source_id = ? AND e.kind = 'calls'`, - ) - .all(nodeId) - .map((c) => ({ name: c.name, kind: c.kind, file: c.file, line: c.line })); - - callers = db - .prepare( - `SELECT n.name, n.kind, n.file, n.line + ) + .all(nodeId) as SymbolRef[] + ).map((c) => ({ name: c.name, kind: c.kind, file: c.file, line: c.line })); + + callers = ( + db + .prepare( + `SELECT n.name, n.kind, n.file, n.line FROM edges e JOIN nodes n ON e.source_id = n.id WHERE e.target_id = ? AND e.kind = 'calls'`, - ) - .all(nodeId) - .map((c) => ({ name: c.name, kind: c.kind, file: c.file, line: c.line })); + ) + .all(nodeId) as SymbolRef[] + ).map((c) => ({ name: c.name, kind: c.kind, file: c.file, line: c.line })); if (noTests) callers = callers.filter((c) => !isTestFile(c.file)); const testCallerRows = db @@ -229,7 +330,7 @@ function enrichSymbol(db, sym, file, noTests, maxDepth, thresholds) { `SELECT DISTINCT n.file FROM edges e JOIN nodes n ON e.source_id = n.id WHERE e.target_id = ? 
AND e.kind = 'calls'`, ) - .all(nodeId); + .all(nodeId) as { file: string }[]; relatedTests = testCallerRows.filter((r) => isTestFile(r.file)).map((r) => ({ file: r.file })); } @@ -262,7 +363,25 @@ function enrichSymbol(db, sym, file, noTests, maxDepth, thresholds) { // ─── Build health metrics from function_complexity ────────────────── -function buildHealth(db, nodeId, thresholds) { +interface ComplexityRow { + cognitive: number; + cyclomatic: number; + max_nesting: number; + maintainability_index: number | null; + halstead_volume: number | null; + halstead_difficulty: number | null; + halstead_effort: number | null; + halstead_bugs: number | null; + loc: number | null; + sloc: number | null; + comment_lines: number | null; +} + +function buildHealth( + db: BetterSqlite3Database, + nodeId: number, + thresholds: Record, +): HealthMetrics { try { const row = db .prepare( @@ -271,7 +390,7 @@ function buildHealth(db, nodeId, thresholds) { loc, sloc, comment_lines FROM function_complexity WHERE node_id = ?`, ) - .get(nodeId); + .get(nodeId) as ComplexityRow | undefined; if (!row) return defaultHealth(); @@ -289,7 +408,7 @@ function buildHealth(db, nodeId, thresholds) { loc: row.loc || 0, sloc: row.sloc || 0, commentLines: row.comment_lines || 0, - thresholdBreaches: checkBreaches(row, thresholds), + thresholdBreaches: checkBreaches(row as unknown as Record, thresholds), }; } catch { /* table may not exist */ @@ -297,7 +416,7 @@ function buildHealth(db, nodeId, thresholds) { } } -function defaultHealth() { +function defaultHealth(): HealthMetrics { return { cognitive: null, cyclomatic: null, diff --git a/src/features/batch.js b/src/features/batch.js deleted file mode 100644 index f5f386a7..00000000 --- a/src/features/batch.js +++ /dev/null @@ -1,156 +0,0 @@ -/** - * Batch query orchestration — run the same query command against multiple targets - * and return all results in a single JSON payload. 
- * - * Designed for multi-agent swarms that need to dispatch 20+ queries in one call. - */ - -import { - contextData, - explainData, - exportsData, - fileDepsData, - fnDepsData, - fnImpactData, - impactAnalysisData, - whereData, -} from '../domain/queries.js'; -import { ConfigError } from '../shared/errors.js'; -import { complexityData } from './complexity.js'; -import { dataflowData } from './dataflow.js'; -import { flowData } from './flow.js'; - -/** - * Map of supported batch commands → their data function + first-arg semantics. - * `sig` describes how the target string is passed to the data function: - * - 'name' → dataFn(target, dbPath, opts) - * - 'target' → dataFn(target, dbPath, opts) - * - 'file' → dataFn(target, dbPath, opts) - * - 'dbOnly' → dataFn(dbPath, { ...opts, target }) (target goes into opts) - */ -export const BATCH_COMMANDS = { - 'fn-impact': { fn: fnImpactData, sig: 'name' }, - context: { fn: contextData, sig: 'name' }, - explain: { fn: explainData, sig: 'target' }, - where: { fn: whereData, sig: 'target' }, - query: { fn: fnDepsData, sig: 'name' }, - impact: { fn: impactAnalysisData, sig: 'file' }, - deps: { fn: fileDepsData, sig: 'file' }, - exports: { fn: exportsData, sig: 'file' }, - flow: { fn: flowData, sig: 'name' }, - dataflow: { fn: dataflowData, sig: 'name' }, - complexity: { fn: complexityData, sig: 'dbOnly' }, -}; - -/** - * Run a query command against multiple targets, returning all results. 
- * - * @param {string} command - One of the keys in BATCH_COMMANDS - * @param {string[]} targets - List of target names/paths - * @param {string} [customDbPath] - Path to graph.db - * @param {object} [opts] - Shared options passed to every invocation - * @returns {{ command: string, total: number, succeeded: number, failed: number, results: object[] }} - */ -export function batchData(command, targets, customDbPath, opts = {}) { - const entry = BATCH_COMMANDS[command]; - if (!entry) { - throw new ConfigError( - `Unknown batch command "${command}". Valid commands: ${Object.keys(BATCH_COMMANDS).join(', ')}`, - ); - } - - const results = []; - let succeeded = 0; - let failed = 0; - - for (const target of targets) { - try { - let data; - if (entry.sig === 'dbOnly') { - // complexityData(dbPath, { ...opts, target }) - data = entry.fn(customDbPath, { ...opts, target }); - } else { - // All other: dataFn(target, dbPath, opts) - data = entry.fn(target, customDbPath, opts); - } - results.push({ target, ok: true, data }); - succeeded++; - } catch (err) { - results.push({ target, ok: false, error: err.message }); - failed++; - } - } - - return { command, total: targets.length, succeeded, failed, results }; -} - -/** - * Expand comma-separated positional args into individual entries. - * `['a,b', 'c']` → `['a', 'b', 'c']`. - * Trims whitespace, filters empties. Passes through object items unchanged. - * - * @param {Array} targets - * @returns {Array} - */ -export function splitTargets(targets) { - const out = []; - for (const item of targets) { - if (typeof item !== 'string') { - out.push(item); - continue; - } - for (const part of item.split(',')) { - const trimmed = part.trim(); - if (trimmed) out.push(trimmed); - } - } - return out; -} - -/** - * Multi-command batch orchestration — run different commands per target. 
- * - * @param {Array<{command: string, target: string, opts?: object}>} items - * @param {string} [customDbPath] - * @param {object} [sharedOpts] - Default opts merged under per-item opts - * @returns {{ mode: 'multi', total: number, succeeded: number, failed: number, results: object[] }} - */ -export function multiBatchData(items, customDbPath, sharedOpts = {}) { - const results = []; - let succeeded = 0; - let failed = 0; - - for (const item of items) { - const { command, target, opts: itemOpts } = item; - const entry = BATCH_COMMANDS[command]; - - if (!entry) { - results.push({ - command, - target, - ok: false, - error: `Unknown batch command "${command}". Valid commands: ${Object.keys(BATCH_COMMANDS).join(', ')}`, - }); - failed++; - continue; - } - - const merged = { ...sharedOpts, ...itemOpts }; - - try { - let data; - if (entry.sig === 'dbOnly') { - data = entry.fn(customDbPath, { ...merged, target }); - } else { - data = entry.fn(target, customDbPath, merged); - } - results.push({ command, target, ok: true, data }); - succeeded++; - } catch (err) { - results.push({ command, target, ok: false, error: err.message }); - failed++; - } - } - - return { mode: 'multi', total: items.length, succeeded, failed, results }; -} diff --git a/src/features/batch.ts b/src/features/batch.ts new file mode 100644 index 00000000..12a6aefb --- /dev/null +++ b/src/features/batch.ts @@ -0,0 +1,182 @@ +import { + contextData, + explainData, + exportsData, + fileDepsData, + fnDepsData, + fnImpactData, + impactAnalysisData, + whereData, +} from '../domain/queries.js'; +import { ConfigError } from '../shared/errors.js'; +import { complexityData } from './complexity.js'; +import { dataflowData } from './dataflow.js'; +import { flowData } from './flow.js'; + +type BatchSig = 'name' | 'target' | 'file' | 'dbOnly'; + +interface BatchCommandEntry { + fn: (...args: unknown[]) => unknown; + sig: BatchSig; +} + +export const BATCH_COMMANDS: Record = { + 'fn-impact': { fn: fnImpactData as 
(...args: unknown[]) => unknown, sig: 'name' }, + context: { fn: contextData as (...args: unknown[]) => unknown, sig: 'name' }, + explain: { fn: explainData as (...args: unknown[]) => unknown, sig: 'target' }, + where: { fn: whereData as (...args: unknown[]) => unknown, sig: 'target' }, + query: { fn: fnDepsData as (...args: unknown[]) => unknown, sig: 'name' }, + impact: { fn: impactAnalysisData as (...args: unknown[]) => unknown, sig: 'file' }, + deps: { fn: fileDepsData as (...args: unknown[]) => unknown, sig: 'file' }, + exports: { fn: exportsData as (...args: unknown[]) => unknown, sig: 'file' }, + flow: { fn: flowData as (...args: unknown[]) => unknown, sig: 'name' }, + dataflow: { fn: dataflowData as (...args: unknown[]) => unknown, sig: 'name' }, + complexity: { fn: complexityData as (...args: unknown[]) => unknown, sig: 'dbOnly' }, +}; + +interface BatchResultOk { + target: string; + ok: true; + data: unknown; +} + +interface BatchResultErr { + target: string; + ok: false; + error: string; +} + +type BatchResultItem = BatchResultOk | BatchResultErr; + +interface BatchResult { + command: string; + total: number; + succeeded: number; + failed: number; + results: BatchResultItem[]; +} + +export function batchData( + command: string, + targets: string[], + customDbPath: string | undefined, + opts: Record = {}, +): BatchResult { + const entry = BATCH_COMMANDS[command]; + if (!entry) { + throw new ConfigError( + `Unknown batch command "${command}". 
Valid commands: ${Object.keys(BATCH_COMMANDS).join(', ')}`, + ); + } + + const results: BatchResultItem[] = []; + let succeeded = 0; + let failed = 0; + + for (const target of targets) { + try { + let data: unknown; + if (entry.sig === 'dbOnly') { + data = entry.fn(customDbPath, { ...opts, target }); + } else { + data = entry.fn(target, customDbPath, opts); + } + results.push({ target, ok: true, data }); + succeeded++; + } catch (err) { + results.push({ target, ok: false, error: (err as Error).message }); + failed++; + } + } + + return { command, total: targets.length, succeeded, failed, results }; +} + +export function splitTargets(targets: Array): Array { + const out: Array = []; + for (const item of targets) { + if (typeof item !== 'string') { + out.push(item); + continue; + } + for (const part of item.split(',')) { + const trimmed = part.trim(); + if (trimmed) out.push(trimmed); + } + } + return out; +} + +interface MultiBatchItem { + command: string; + target: string; + opts?: Record; +} + +interface MultiBatchResultOk { + command: string; + target: string; + ok: true; + data: unknown; +} + +interface MultiBatchResultErr { + command: string; + target: string; + ok: false; + error: string; +} + +type MultiBatchResultItem = MultiBatchResultOk | MultiBatchResultErr; + +interface MultiBatchResult { + mode: 'multi'; + total: number; + succeeded: number; + failed: number; + results: MultiBatchResultItem[]; +} + +export function multiBatchData( + items: MultiBatchItem[], + customDbPath: string | undefined, + sharedOpts: Record = {}, +): MultiBatchResult { + const results: MultiBatchResultItem[] = []; + let succeeded = 0; + let failed = 0; + + for (const item of items) { + const { command, target, opts: itemOpts } = item; + const entry = BATCH_COMMANDS[command]; + + if (!entry) { + results.push({ + command, + target, + ok: false, + error: `Unknown batch command "${command}". 
Valid commands: ${Object.keys(BATCH_COMMANDS).join(', ')}`, + }); + failed++; + continue; + } + + const merged = { ...sharedOpts, ...itemOpts }; + + try { + let data: unknown; + if (entry.sig === 'dbOnly') { + data = entry.fn(customDbPath, { ...merged, target }); + } else { + data = entry.fn(target, customDbPath, merged); + } + results.push({ command, target, ok: true, data }); + succeeded++; + } catch (err) { + results.push({ command, target, ok: false, error: (err as Error).message }); + failed++; + } + } + + return { mode: 'multi', total: items.length, succeeded, failed, results }; +} diff --git a/src/features/boundaries.js b/src/features/boundaries.ts similarity index 63% rename from src/features/boundaries.js rename to src/features/boundaries.ts index 536dbafa..672ba55b 100644 --- a/src/features/boundaries.js +++ b/src/features/boundaries.ts @@ -1,27 +1,19 @@ import { debug } from '../infrastructure/logger.js'; import { isTestFile } from '../infrastructure/test-filter.js'; +import type { BetterSqlite3Database } from '../types.js'; // ─── Glob-to-Regex ─────────────────────────────────────────────────── -/** - * Convert a simple glob pattern to a RegExp. - * Supports `**` (any path segment), `*` (non-slash), `?` (single non-slash char). - * @param {string} pattern - * @returns {RegExp} - */ -export function globToRegex(pattern) { +export function globToRegex(pattern: string): RegExp { let re = ''; let i = 0; while (i < pattern.length) { - const ch = pattern[i]; + const ch = pattern[i] as string; if (ch === '*' && pattern[i + 1] === '*') { - // ** matches any number of path segments re += '.*'; i += 2; - // Skip trailing slash after ** if (pattern[i] === '/') i++; } else if (ch === '*') { - // * matches non-slash characters re += '[^/]*'; i++; } else if (ch === '?') { @@ -40,12 +32,12 @@ export function globToRegex(pattern) { // ─── Presets ───────────────────────────────────────────────────────── -/** - * Built-in preset definitions. 
- * Each defines layers ordered from innermost (most protected) to outermost. - * Inner layers cannot import from outer layers. - */ -export const PRESETS = { +interface PresetDef { + layers: string[]; + description: string; +} + +export const PRESETS: Record = { hexagonal: { layers: ['domain', 'application', 'adapters', 'infrastructure'], description: 'Inner layers cannot import outer layers', @@ -66,14 +58,34 @@ export const PRESETS = { // ─── Module Resolution ─────────────────────────────────────────────── -/** - * Parse module definitions into a Map of name → { regex, pattern, layer? }. - * Supports string shorthand and object form. - * @param {object} boundaryConfig - The `manifesto.boundaries` config object - * @returns {Map} - */ -export function resolveModules(boundaryConfig) { - const modules = new Map(); +interface ResolvedModule { + regex: RegExp; + pattern: string; + layer?: string; +} + +interface ModuleDef { + match: string; + layer?: string; +} + +interface BoundaryRule { + from: string; + notTo?: string[]; + onlyTo?: string[]; + message?: string; +} + +interface BoundaryConfig { + modules?: Record; + preset?: string; + rules?: BoundaryRule[]; +} + +export function resolveModules( + boundaryConfig: BoundaryConfig | undefined, +): Map { + const modules = new Map(); const defs = boundaryConfig?.modules; if (!defs || typeof defs !== 'object') return modules; @@ -93,29 +105,20 @@ export function resolveModules(boundaryConfig) { // ─── Validation ────────────────────────────────────────────────────── -/** - * Validate the `modules` section of a boundary config. 
- * @param {object} modules - * @param {string[]} errors - Mutated: push any validation errors - */ -function validateModules(modules, errors) { - if (!modules || typeof modules !== 'object' || Object.keys(modules).length === 0) { +function validateModules(modules: unknown, errors: string[]): void { + if (!modules || typeof modules !== 'object' || Object.keys(modules as object).length === 0) { errors.push('boundaries.modules must be a non-empty object'); return; } - for (const [name, value] of Object.entries(modules)) { + for (const [name, value] of Object.entries(modules as Record)) { if (typeof value === 'string') continue; - if (value && typeof value === 'object' && typeof value.match === 'string') continue; + if (value && typeof value === 'object' && typeof (value as ModuleDef).match === 'string') + continue; errors.push(`boundaries.modules.${name}: must be a glob string or { match: "" }`); } } -/** - * Validate the `preset` field of a boundary config. - * @param {string|null|undefined} preset - * @param {string[]} errors - Mutated: push any validation errors - */ -function validatePreset(preset, errors) { +function validatePreset(preset: unknown, errors: string[]): void { if (preset == null) return; if (typeof preset !== 'string' || !PRESETS[preset]) { errors.push( @@ -124,15 +127,13 @@ function validatePreset(preset, errors) { } } -/** - * Validate a single rule's target list (`notTo` or `onlyTo`). 
- * @param {*} list - The target list value - * @param {string} field - "notTo" or "onlyTo" - * @param {number} idx - Rule index for error messages - * @param {Set} moduleNames - * @param {string[]} errors - Mutated - */ -function validateTargetList(list, field, idx, moduleNames, errors) { +function validateTargetList( + list: unknown, + field: string, + idx: number, + moduleNames: Set, + errors: string[], +): void { if (!Array.isArray(list)) { errors.push(`boundaries.rules[${idx}]: "${field}" must be an array`); return; @@ -144,21 +145,15 @@ function validateTargetList(list, field, idx, moduleNames, errors) { } } -/** - * Validate the `rules` array of a boundary config. - * @param {Array} rules - * @param {object|undefined} modules - The modules config (for cross-referencing names) - * @param {string[]} errors - Mutated - */ -function validateRules(rules, modules, errors) { +function validateRules(rules: unknown, modules: unknown, errors: string[]): void { if (!rules) return; if (!Array.isArray(rules)) { errors.push('boundaries.rules must be an array'); return; } - const moduleNames = modules ? new Set(Object.keys(modules)) : new Set(); + const moduleNames = modules ? new Set(Object.keys(modules as object)) : new Set(); for (let i = 0; i < rules.length; i++) { - const rule = rules[i]; + const rule = rules[i] as BoundaryRule; if (!rule.from) { errors.push(`boundaries.rules[${i}]: missing "from" field`); } else if (!moduleNames.has(rule.from)) { @@ -175,14 +170,10 @@ function validateRules(rules, modules, errors) { } } -/** - * Validate that module layer assignments match preset layers. 
- * @param {object} config - * @param {string[]} errors - Mutated - */ -function validateLayerAssignments(config, errors) { +function validateLayerAssignments(config: BoundaryConfig, errors: string[]): void { if (!config.preset || !PRESETS[config.preset] || !config.modules) return; - const presetLayers = new Set(PRESETS[config.preset].layers); + const preset = PRESETS[config.preset]!; + const presetLayers = new Set(preset.layers); for (const [name, value] of Object.entries(config.modules)) { if (typeof value === 'object' && value.layer && !presetLayers.has(value.layer)) { errors.push( @@ -192,53 +183,46 @@ function validateLayerAssignments(config, errors) { } } -/** - * Validate a boundary configuration object. - * @param {object} config - The `manifesto.boundaries` config - * @returns {{ valid: boolean, errors: string[] }} - */ -export function validateBoundaryConfig(config) { +export function validateBoundaryConfig(config: unknown): { valid: boolean; errors: string[] } { if (!config || typeof config !== 'object') { return { valid: false, errors: ['boundaries config must be an object'] }; } - const errors = []; - validateModules(config.modules, errors); - validatePreset(config.preset, errors); - validateRules(config.rules, config.modules, errors); - validateLayerAssignments(config, errors); + const errors: string[] = []; + const cfg = config as BoundaryConfig; + validateModules(cfg.modules, errors); + validatePreset(cfg.preset, errors); + validateRules(cfg.rules, cfg.modules, errors); + validateLayerAssignments(cfg, errors); return { valid: errors.length === 0, errors }; } // ─── Preset Rule Generation ───────────────────────────────────────── -/** - * Generate notTo rules from preset layer assignments. - * Inner layers cannot import from outer layers. 
- */ -function generatePresetRules(modules, presetName) { +function generatePresetRules( + modules: Map, + presetName: string, +): BoundaryRule[] { const preset = PRESETS[presetName]; if (!preset) return []; const layers = preset.layers; - const layerIndex = new Map(layers.map((l, i) => [l, i])); + const layerIndex = new Map(layers.map((l, i) => [l, i])); - // Group modules by layer - const modulesByLayer = new Map(); + const modulesByLayer = new Map(); for (const [name, mod] of modules) { if (mod.layer && layerIndex.has(mod.layer)) { if (!modulesByLayer.has(mod.layer)) modulesByLayer.set(mod.layer, []); - modulesByLayer.get(mod.layer).push(name); + modulesByLayer.get(mod.layer)!.push(name); } } - const rules = []; - // For each layer, forbid imports to any outer (higher-index) layer + const rules: BoundaryRule[] = []; for (const [layer, modNames] of modulesByLayer) { - const idx = layerIndex.get(layer); - const outerModules = []; + const idx = layerIndex.get(layer)!; + const outerModules: string[] = []; for (const [otherLayer, otherModNames] of modulesByLayer) { - if (layerIndex.get(otherLayer) > idx) { + if (layerIndex.get(otherLayer)! > idx) { outerModules.push(...otherModNames); } } @@ -254,40 +238,45 @@ function generatePresetRules(modules, presetName) { // ─── Evaluation ────────────────────────────────────────────────────── -/** - * Classify a file path into a module name. Returns the first matching module or null. - */ -function classifyFile(filePath, modules) { +function classifyFile(filePath: string, modules: Map): string | null { for (const [name, mod] of modules) { if (mod.regex.test(filePath)) return name; } return null; } -/** - * Evaluate boundary rules against the dependency graph. 
- * - * @param {object} db - Open SQLite database (readonly) - * @param {object} boundaryConfig - The `manifesto.boundaries` config - * @param {object} [opts] - * @param {string[]} [opts.scopeFiles] - Only check edges from these files (diff-impact mode) - * @param {boolean} [opts.noTests] - Exclude test files - * @returns {{ violations: object[], violationCount: number }} - */ -export function evaluateBoundaries(db, boundaryConfig, opts = {}) { +interface BoundaryViolation { + rule: string; + name: string; + file: string; + targetFile: string; + message: string; + value: number; + threshold: number; +} + +interface EvaluateBoundariesOpts { + scopeFiles?: string[]; + noTests?: boolean; +} + +export function evaluateBoundaries( + db: BetterSqlite3Database, + boundaryConfig: BoundaryConfig | undefined, + opts: EvaluateBoundariesOpts = {}, +): { violations: BoundaryViolation[]; violationCount: number } { if (!boundaryConfig) return { violations: [], violationCount: 0 }; const { valid, errors } = validateBoundaryConfig(boundaryConfig); if (!valid) { - debug('boundary config validation failed: %s', errors.join('; ')); + debug(`boundary config validation failed: ${errors.join('; ')}`); return { violations: [], violationCount: 0 }; } const modules = resolveModules(boundaryConfig); if (modules.size === 0) return { violations: [], violationCount: 0 }; - // Merge user rules with preset-generated rules - let allRules = []; + let allRules: BoundaryRule[] = []; if (boundaryConfig.preset) { allRules = generatePresetRules(modules, boundaryConfig.preset); } @@ -296,8 +285,7 @@ export function evaluateBoundaries(db, boundaryConfig, opts = {}) { } if (allRules.length === 0) return { violations: [], violationCount: 0 }; - // Query file-level import edges - let edges; + let edges: Array<{ source: string; target: string }>; try { edges = db .prepare( @@ -307,13 +295,12 @@ export function evaluateBoundaries(db, boundaryConfig, opts = {}) { JOIN nodes n2 ON e.target_id = n2.id WHERE 
n1.file != n2.file AND e.kind IN ('imports', 'imports-type')`, ) - .all(); + .all() as Array<{ source: string; target: string }>; } catch (err) { - debug('boundary edge query failed: %s', err.message); + debug(`boundary edge query failed: ${(err as Error).message}`); return { violations: [], violationCount: 0 }; } - // Filter by scope and tests if (opts.noTests) { edges = edges.filter((e) => !isTestFile(e.source) && !isTestFile(e.target)); } @@ -322,14 +309,12 @@ export function evaluateBoundaries(db, boundaryConfig, opts = {}) { edges = edges.filter((e) => scope.has(e.source)); } - // Check each edge against rules - const violations = []; + const violations: BoundaryViolation[] = []; for (const edge of edges) { const fromModule = classifyFile(edge.source, modules); const toModule = classifyFile(edge.target, modules); - // Skip edges where source or target is not in any module if (!fromModule || !toModule) continue; for (const rule of allRules) { diff --git a/src/features/branch-compare.js b/src/features/branch-compare.ts similarity index 70% rename from src/features/branch-compare.js rename to src/features/branch-compare.ts index 474e162e..25880f35 100644 --- a/src/features/branch-compare.js +++ b/src/features/branch-compare.ts @@ -1,11 +1,3 @@ -/** - * Branch structural diff – compare code structure between two git refs. - * - * Builds separate codegraph databases for each ref using git worktrees, - * then diffs at the symbol level to show added/removed/changed symbols - * and transitive caller impact. 
- */ - import { execFileSync } from 'node:child_process'; import fs from 'node:fs'; import os from 'node:os'; @@ -14,10 +6,17 @@ import Database from 'better-sqlite3'; import { buildGraph } from '../domain/graph/builder.js'; import { kindIcon } from '../domain/queries.js'; import { isTestFile } from '../infrastructure/test-filter.js'; +import type { BetterSqlite3Database, EngineMode } from '../types.js'; + +type DatabaseConstructor = new ( + path: string, + opts?: Record, +) => BetterSqlite3Database; +const Db = Database as unknown as DatabaseConstructor; // ─── Git Helpers ──────────────────────────────────────────────────────── -function validateGitRef(repoRoot, ref) { +function validateGitRef(repoRoot: string, ref: string): string | null { try { const sha = execFileSync('git', ['rev-parse', '--verify', ref], { cwd: repoRoot, @@ -30,7 +29,7 @@ function validateGitRef(repoRoot, ref) { } } -function getChangedFilesBetweenRefs(repoRoot, base, target) { +function getChangedFilesBetweenRefs(repoRoot: string, base: string, target: string): string[] { const output = execFileSync('git', ['diff', '--name-only', `${base}..${target}`], { cwd: repoRoot, encoding: 'utf-8', @@ -40,7 +39,7 @@ function getChangedFilesBetweenRefs(repoRoot, base, target) { return output.split('\n').filter(Boolean); } -function createWorktree(repoRoot, ref, dir) { +function createWorktree(repoRoot: string, ref: string, dir: string): void { execFileSync('git', ['worktree', 'add', '--detach', dir, ref], { cwd: repoRoot, encoding: 'utf-8', @@ -48,7 +47,7 @@ function createWorktree(repoRoot, ref, dir) { }); } -function removeWorktree(repoRoot, dir) { +function removeWorktree(repoRoot: string, dir: string): void { try { execFileSync('git', ['worktree', 'remove', '--force', dir], { cwd: repoRoot, @@ -56,7 +55,6 @@ function removeWorktree(repoRoot, dir) { stdio: ['pipe', 'pipe', 'pipe'], }); } catch { - // Fallback: remove directory and prune try { fs.rmSync(dir, { recursive: true, force: true }); } catch 
{ @@ -76,20 +74,51 @@ function removeWorktree(repoRoot, dir) { // ─── Symbol Loading ───────────────────────────────────────────────────── -function makeSymbolKey(kind, file, name) { +interface SymbolInfo { + id: number; + name: string; + kind: string; + file: string; + line: number; + lineCount: number; + fanIn: number; + fanOut: number; +} + +interface CallerInfo { + name: string; + kind: string; + file: string; + line: number; +} + +interface ChangedSymbol { + name: string; + kind: string; + file: string; + base: { line: number; lineCount: number; fanIn: number; fanOut: number }; + target: { line: number; lineCount: number; fanIn: number; fanOut: number }; + changes: { lineCount: number; fanIn: number; fanOut: number }; + impact?: CallerInfo[]; +} + +function makeSymbolKey(kind: string, file: string, name: string): string { return `${kind}::${file}::${name}`; } -function loadSymbolsFromDb(dbPath, changedFiles, noTests) { - const db = new Database(dbPath, { readonly: true }); +function loadSymbolsFromDb( + dbPath: string, + changedFiles: string[], + noTests: boolean, +): Map { + const db = new Db(dbPath, { readonly: true }); try { - const symbols = new Map(); + const symbols = new Map(); if (changedFiles.length === 0) { return symbols; } - // Query nodes in changed files const placeholders = changedFiles.map(() => '?').join(', '); const rows = db .prepare( @@ -99,9 +128,15 @@ function loadSymbolsFromDb(dbPath, changedFiles, noTests) { AND n.kind NOT IN ('file', 'directory') ORDER BY n.file, n.line`, ) - .all(...changedFiles); + .all(...changedFiles) as Array<{ + id: number; + name: string; + kind: string; + file: string; + line: number; + end_line: number | null; + }>; - // Compute fan_in and fan_out for each node const fanInStmt = db.prepare( `SELECT COUNT(*) AS cnt FROM edges WHERE target_id = ? 
AND kind = 'calls'`, ); @@ -113,8 +148,8 @@ function loadSymbolsFromDb(dbPath, changedFiles, noTests) { if (noTests && isTestFile(row.file)) continue; const lineCount = row.end_line ? row.end_line - row.line + 1 : 0; - const fanIn = fanInStmt.get(row.id).cnt; - const fanOut = fanOutStmt.get(row.id).cnt; + const fanIn = (fanInStmt.get(row.id) as { cnt: number }).cnt; + const fanOut = (fanOutStmt.get(row.id) as { cnt: number }).cnt; const key = makeSymbolKey(row.kind, row.file, row.name); symbols.set(key, { @@ -137,19 +172,24 @@ function loadSymbolsFromDb(dbPath, changedFiles, noTests) { // ─── Caller BFS ───────────────────────────────────────────────────────── -function loadCallersFromDb(dbPath, nodeIds, maxDepth, noTests) { +function loadCallersFromDb( + dbPath: string, + nodeIds: number[], + maxDepth: number, + noTests: boolean, +): CallerInfo[] { if (nodeIds.length === 0) return []; - const db = new Database(dbPath, { readonly: true }); + const db = new Db(dbPath, { readonly: true }); try { - const allCallers = new Set(); + const allCallers = new Set(); for (const startId of nodeIds) { - const visited = new Set([startId]); + const visited = new Set([startId]); let frontier = [startId]; for (let d = 1; d <= maxDepth; d++) { - const nextFrontier = []; + const nextFrontier: number[] = []; for (const fid of frontier) { const callers = db .prepare( @@ -157,7 +197,13 @@ function loadCallersFromDb(dbPath, nodeIds, maxDepth, noTests) { FROM edges e JOIN nodes n ON e.source_id = n.id WHERE e.target_id = ? 
AND e.kind = 'calls'`, ) - .all(fid); + .all(fid) as Array<{ + id: number; + name: string; + kind: string; + file: string; + line: number; + }>; for (const c of callers) { if (!visited.has(c.id) && (!noTests || !isTestFile(c.file))) { @@ -174,7 +220,7 @@ function loadCallersFromDb(dbPath, nodeIds, maxDepth, noTests) { } } - return [...allCallers].map((s) => JSON.parse(s)); + return [...allCallers].map((s) => JSON.parse(s) as CallerInfo); } finally { db.close(); } @@ -182,26 +228,26 @@ function loadCallersFromDb(dbPath, nodeIds, maxDepth, noTests) { // ─── Symbol Comparison ────────────────────────────────────────────────── -function compareSymbols(baseSymbols, targetSymbols) { - const added = []; - const removed = []; - const changed = []; +function compareSymbols( + baseSymbols: Map, + targetSymbols: Map, +): { added: SymbolInfo[]; removed: SymbolInfo[]; changed: ChangedSymbol[] } { + const added: SymbolInfo[] = []; + const removed: SymbolInfo[] = []; + const changed: ChangedSymbol[] = []; - // Added: in target but not base for (const [key, sym] of targetSymbols) { if (!baseSymbols.has(key)) { added.push(sym); } } - // Removed: in base but not target for (const [key, sym] of baseSymbols) { if (!targetSymbols.has(key)) { removed.push(sym); } } - // Changed: in both but with different metrics for (const [key, baseSym] of baseSymbols) { const targetSym = targetSymbols.get(key); if (!targetSym) continue; @@ -241,13 +287,46 @@ function compareSymbols(baseSymbols, targetSymbols) { // ─── Main Data Function ───────────────────────────────────────────────── -export async function branchCompareData(baseRef, targetRef, opts = {}) { +interface BranchCompareOpts { + repoRoot?: string; + depth?: number; + noTests?: boolean; + engine?: string; +} + +interface BranchCompareSummary { + added: number; + removed: number; + changed: number; + totalImpacted: number; + filesAffected: number; +} + +type SymbolWithoutId = Omit & { impact?: CallerInfo[] }; + +interface 
BranchCompareResult { + error?: string; + baseRef?: string; + targetRef?: string; + baseSha?: string; + targetSha?: string; + changedFiles?: string[]; + added?: SymbolWithoutId[]; + removed?: SymbolWithoutId[]; + changed?: ChangedSymbol[]; + summary?: BranchCompareSummary; +} + +export async function branchCompareData( + baseRef: string, + targetRef: string, + opts: BranchCompareOpts = {}, +): Promise { const repoRoot = opts.repoRoot || process.cwd(); const maxDepth = opts.depth || 3; const noTests = opts.noTests || false; - const engine = opts.engine || 'wasm'; + const engine = (opts.engine || 'wasm') as EngineMode; - // Check if this is a git repo try { execFileSync('git', ['rev-parse', '--git-dir'], { cwd: repoRoot, @@ -258,14 +337,12 @@ export async function branchCompareData(baseRef, targetRef, opts = {}) { return { error: 'Not a git repository' }; } - // Validate refs const baseSha = validateGitRef(repoRoot, baseRef); if (!baseSha) return { error: `Invalid git ref: "${baseRef}"` }; const targetSha = validateGitRef(repoRoot, targetRef); if (!targetSha) return { error: `Invalid git ref: "${targetRef}"` }; - // Get changed files const changedFiles = getChangedFilesBetweenRefs(repoRoot, baseSha, targetSha); if (changedFiles.length === 0) { @@ -288,49 +365,41 @@ export async function branchCompareData(baseRef, targetRef, opts = {}) { }; } - // Create temp dir for worktrees const tmpBase = fs.mkdtempSync(path.join(os.tmpdir(), 'codegraph-bc-')); const baseDir = path.join(tmpBase, 'base'); const targetDir = path.join(tmpBase, 'target'); try { - // Create worktrees createWorktree(repoRoot, baseSha, baseDir); createWorktree(repoRoot, targetSha, targetDir); - // Build graphs await buildGraph(baseDir, { engine, skipRegistry: true }); await buildGraph(targetDir, { engine, skipRegistry: true }); const baseDbPath = path.join(baseDir, '.codegraph', 'graph.db'); const targetDbPath = path.join(targetDir, '.codegraph', 'graph.db'); - // Normalize file paths for comparison 
(relative to worktree root) const normalizedFiles = changedFiles.map((f) => f.replace(/\\/g, '/')); - // Load symbols from both DBs const baseSymbols = loadSymbolsFromDb(baseDbPath, normalizedFiles, noTests); const targetSymbols = loadSymbolsFromDb(targetDbPath, normalizedFiles, noTests); - // Compare const { added, removed, changed } = compareSymbols(baseSymbols, targetSymbols); - // BFS for transitive callers of removed/changed symbols in base graph const removedIds = removed.map((s) => s.id).filter(Boolean); const changedIds = changed .map((s) => { const baseSym = baseSymbols.get(makeSymbolKey(s.kind, s.file, s.name)); return baseSym?.id; }) - .filter(Boolean); + .filter((id): id is number => Boolean(id)); const removedImpact = loadCallersFromDb(baseDbPath, removedIds, maxDepth, noTests); const changedImpact = loadCallersFromDb(baseDbPath, changedIds, maxDepth, noTests); - // Attach impact to removed/changed for (const sym of removed) { const symCallers = loadCallersFromDb(baseDbPath, sym.id ? 
[sym.id] : [], maxDepth, noTests); - sym.impact = symCallers; + (sym as SymbolInfo & { impact?: CallerInfo[] }).impact = symCallers; } for (const sym of changed) { const baseSym = baseSymbols.get(makeSymbolKey(sym.kind, sym.file, sym.name)); @@ -343,17 +412,21 @@ export async function branchCompareData(baseRef, targetRef, opts = {}) { sym.impact = symCallers; } - // Summary - const allImpacted = new Set(); + const allImpacted = new Set(); for (const c of removedImpact) allImpacted.add(`${c.file}:${c.name}`); for (const c of changedImpact) allImpacted.add(`${c.file}:${c.name}`); - const impactedFiles = new Set(); - for (const key of allImpacted) impactedFiles.add(key.split(':')[0]); + const impactedFiles = new Set(); + for (const key of allImpacted) impactedFiles.add(key.split(':')[0]!); - // Remove id fields from output (internal only) - const cleanAdded = added.map(({ id, ...rest }) => rest); - const cleanRemoved = removed.map(({ id, ...rest }) => rest); + const cleanAdded = added.map(({ id: _id, ...rest }) => rest as SymbolWithoutId); + const cleanRemoved = removed.map(({ id: _id, ...rest }) => { + const result = rest as SymbolWithoutId; + if ((rest as SymbolInfo & { impact?: CallerInfo[] }).impact) { + result.impact = (rest as SymbolInfo & { impact?: CallerInfo[] }).impact; + } + return result; + }); return { baseRef, @@ -373,9 +446,8 @@ export async function branchCompareData(baseRef, targetRef, opts = {}) { }, }; } catch (err) { - return { error: err.message }; + return { error: (err as Error).message }; } finally { - // Clean up worktrees removeWorktree(repoRoot, baseDir); removeWorktree(repoRoot, targetDir); try { @@ -388,62 +460,63 @@ export async function branchCompareData(baseRef, targetRef, opts = {}) { // ─── Mermaid Output ───────────────────────────────────────────────────── -export function branchCompareMermaid(data) { +export function branchCompareMermaid(data: BranchCompareResult): string { if (data.error) return data.error; - if (data.added.length 
=== 0 && data.removed.length === 0 && data.changed.length === 0) { + if ( + (data.added?.length ?? 0) === 0 && + (data.removed?.length ?? 0) === 0 && + (data.changed?.length ?? 0) === 0 + ) { return 'flowchart TB\n none["No structural differences detected"]'; } const lines = ['flowchart TB']; let nodeCounter = 0; - const nodeIdMap = new Map(); + const nodeIdMap = new Map(); - function nodeId(key) { + function nodeId(key: string): string { if (!nodeIdMap.has(key)) { nodeIdMap.set(key, `n${nodeCounter++}`); } - return nodeIdMap.get(key); + return nodeIdMap.get(key)!; } - // Added subgraph (green) - if (data.added.length > 0) { + if (data.added && data.added.length > 0) { lines.push(' subgraph sg_added["Added"]'); for (const sym of data.added) { const key = `added::${sym.kind}::${sym.file}::${sym.name}`; - const nid = nodeId(key, sym.name); + const nid = nodeId(key); lines.push(` ${nid}["[${kindIcon(sym.kind)}] ${sym.name}"]`); } lines.push(' end'); lines.push(' style sg_added fill:#e8f5e9,stroke:#4caf50'); } - // Removed subgraph (red) - if (data.removed.length > 0) { + if (data.removed && data.removed.length > 0) { lines.push(' subgraph sg_removed["Removed"]'); for (const sym of data.removed) { const key = `removed::${sym.kind}::${sym.file}::${sym.name}`; - const nid = nodeId(key, sym.name); + const nid = nodeId(key); lines.push(` ${nid}["[${kindIcon(sym.kind)}] ${sym.name}"]`); } lines.push(' end'); lines.push(' style sg_removed fill:#ffebee,stroke:#f44336'); } - // Changed subgraph (orange) - if (data.changed.length > 0) { + if (data.changed && data.changed.length > 0) { lines.push(' subgraph sg_changed["Changed"]'); for (const sym of data.changed) { const key = `changed::${sym.kind}::${sym.file}::${sym.name}`; - const nid = nodeId(key, sym.name); + const nid = nodeId(key); lines.push(` ${nid}["[${kindIcon(sym.kind)}] ${sym.name}"]`); } lines.push(' end'); lines.push(' style sg_changed fill:#fff3e0,stroke:#ff9800'); } - // Impacted callers subgraph (purple) - 
const allImpacted = new Map(); - for (const sym of [...data.removed, ...data.changed]) { + const allImpacted = new Map(); + const impactSources = [...(data.removed || []), ...(data.changed || [])]; + for (const sym of impactSources) { if (!sym.impact) continue; for (const c of sym.impact) { const key = `impact::${c.kind}::${c.file}::${c.name}`; @@ -454,17 +527,16 @@ export function branchCompareMermaid(data) { if (allImpacted.size > 0) { lines.push(' subgraph sg_impact["Impacted Callers"]'); for (const [key, c] of allImpacted) { - const nid = nodeId(key, c.name); + const nid = nodeId(key); lines.push(` ${nid}["[${kindIcon(c.kind)}] ${c.name}"]`); } lines.push(' end'); lines.push(' style sg_impact fill:#f3e5f5,stroke:#9c27b0'); } - // Edges: removed/changed -> impacted callers - for (const sym of [...data.removed, ...data.changed]) { + for (const sym of impactSources) { if (!sym.impact) continue; - const prefix = data.removed.includes(sym) ? 'removed' : 'changed'; + const prefix = (data.removed || []).includes(sym as SymbolWithoutId) ? 'removed' : 'changed'; const symKey = `${prefix}::${sym.kind}::${sym.file}::${sym.name}`; for (const c of sym.impact) { const callerKey = `impact::${c.kind}::${c.file}::${c.name}`; diff --git a/src/features/cfg.js b/src/features/cfg.ts similarity index 68% rename from src/features/cfg.js rename to src/features/cfg.ts index 3f029274..bf391a15 100644 --- a/src/features/cfg.js +++ b/src/features/cfg.ts @@ -1,10 +1,3 @@ -/** - * Intraprocedural Control Flow Graph (CFG) construction from tree-sitter AST. - * - * Builds basic-block CFGs for individual functions, stored in cfg_blocks + cfg_edges tables. - * Opt-in via `build --cfg`. Supports JS/TS/TSX, Python, Go, Rust, Java, C#, Ruby, PHP. 
- */ - import fs from 'node:fs'; import path from 'node:path'; import { CFG_RULES } from '../ast-analysis/rules/index.js'; @@ -25,42 +18,57 @@ import { } from '../db/index.js'; import { debug, info } from '../infrastructure/logger.js'; import { paginateResult } from '../shared/paginate.js'; +import type { BetterSqlite3Database, Definition, NodeRow, TreeSitterNode } from '../types.js'; import { findNodes } from './shared/find-nodes.js'; -// Re-export for backward compatibility export { _makeCfgRules as makeCfgRules, CFG_RULES }; const CFG_EXTENSIONS = buildExtensionSet(CFG_RULES); // ─── Core Algorithm: AST → CFG ────────────────────────────────────────── -/** - * Build a control flow graph for a single function AST node. - * - * Thin wrapper around the CFG visitor — runs walkWithVisitors on the function - * node and returns the first result. All CFG construction logic lives in - * `ast-analysis/visitors/cfg-visitor.js`. - * - * @param {object} functionNode - tree-sitter function AST node - * @param {string} langId - language identifier - * @returns {{ blocks: object[], edges: object[], cyclomatic: number }} - CFG blocks, edges, and derived cyclomatic - */ -export function buildFunctionCFG(functionNode, langId) { +interface CfgBuildBlock { + index: number; + type: string; + startLine: number; + endLine: number; + label: string; +} + +interface CfgBuildEdge { + sourceIndex: number; + targetIndex: number; + kind: string; +} + +interface CfgBuildResult { + blocks: CfgBuildBlock[]; + edges: CfgBuildEdge[]; + cyclomatic: number; +} + +export function buildFunctionCFG(functionNode: TreeSitterNode, langId: string): CfgBuildResult { const rules = CFG_RULES.get(langId); if (!rules) return { blocks: [], edges: [], cyclomatic: 0 }; const visitor = createCfgVisitor(rules); const walkerOpts = { functionNodeTypes: new Set(rules.functionNodes), - nestingNodeTypes: new Set(), - getFunctionName: (node) => { - const nameNode = node.childForFieldName('name'); + nestingNodeTypes: new 
Set(), + getFunctionName: (node: TreeSitterNode) => { + const nameNode = node.childForFieldName?.('name'); return nameNode ? nameNode.text : null; }, }; const results = walkWithVisitors(functionNode, [visitor], langId, walkerOpts); - const cfgResults = results.cfg || []; + // biome-ignore lint/complexity/useLiteralKeys: noPropertyAccessFromIndexSignature requires bracket notation + const cfgResults = (results['cfg'] || []) as Array<{ + funcNode: TreeSitterNode; + blocks: CfgBuildBlock[]; + edges: CfgBuildEdge[]; + cyclomatic: number; + }>; if (cfgResults.length === 0) return { blocks: [], edges: [], cyclomatic: 0 }; const r = cfgResults.find((result) => result.funcNode === functionNode); @@ -70,7 +78,15 @@ export function buildFunctionCFG(functionNode, langId) { // ─── Build-Time Helpers ───────────────────────────────────────────────── -async function initCfgParsers(fileSymbols) { +interface FileSymbols { + definitions: Definition[]; + _tree?: { rootNode: TreeSitterNode }; + _langId?: string; +} + +async function initCfgParsers( + fileSymbols: Map, +): Promise<{ parsers: unknown; getParserFn: unknown }> { let needsFallback = false; for (const [relPath, symbols] of fileSymbols) { @@ -79,7 +95,7 @@ async function initCfgParsers(fileSymbols) { if (CFG_EXTENSIONS.has(ext)) { const hasNativeCfg = symbols.definitions .filter((d) => (d.kind === 'function' || d.kind === 'method') && d.line) - .every((d) => d.cfg === null || d.cfg?.blocks?.length); + .every((d) => d.cfg === null || (d.cfg?.blocks?.length ?? 
0) > 0); if (!hasNativeCfg) { needsFallback = true; break; @@ -88,8 +104,8 @@ async function initCfgParsers(fileSymbols) { } } - let parsers = null; - let getParserFn = null; + let parsers: unknown = null; + let getParserFn: unknown = null; if (needsFallback) { const { createParsers } = await import('../domain/parser.js'); @@ -101,14 +117,21 @@ async function initCfgParsers(fileSymbols) { return { parsers, getParserFn }; } -function getTreeAndLang(symbols, relPath, rootDir, extToLang, parsers, getParserFn) { +function getTreeAndLang( + symbols: FileSymbols, + relPath: string, + rootDir: string, + extToLang: Map, + parsers: unknown, + getParserFn: unknown, +): { tree: { rootNode: TreeSitterNode }; langId: string } | null { const ext = path.extname(relPath).toLowerCase(); let tree = symbols._tree; let langId = symbols._langId; const allNative = symbols.definitions .filter((d) => (d.kind === 'function' || d.kind === 'method') && d.line) - .every((d) => d.cfg === null || d.cfg?.blocks?.length); + .every((d) => d.cfg === null || (d.cfg?.blocks?.length ?? 
0) > 0); if (!tree && !allNative) { if (!getParserFn) return null; @@ -116,21 +139,24 @@ function getTreeAndLang(symbols, relPath, rootDir, extToLang, parsers, getParser if (!langId || !CFG_RULES.has(langId)) return null; const absPath = path.join(rootDir, relPath); - let code; + let code: string; try { code = fs.readFileSync(absPath, 'utf-8'); } catch (e) { - debug(`cfg: cannot read ${relPath}: ${e.message}`); + debug(`cfg: cannot read ${relPath}: ${(e as Error).message}`); return null; } - const parser = getParserFn(parsers, absPath); + const parser = (getParserFn as (parsers: unknown, absPath: string) => unknown)( + parsers, + absPath, + ); if (!parser) return null; try { - tree = parser.parse(code); + tree = (parser as { parse: (code: string) => { rootNode: TreeSitterNode } }).parse(code); } catch (e) { - debug(`cfg: parse failed for ${relPath}: ${e.message}`); + debug(`cfg: parse failed for ${relPath}: ${(e as Error).message}`); return null; } } @@ -140,10 +166,21 @@ function getTreeAndLang(symbols, relPath, rootDir, extToLang, parsers, getParser if (!langId) return null; } - return { tree, langId }; + return { tree: tree!, langId }; +} + +interface VisitorCfgResult { + funcNode: TreeSitterNode; + blocks: CfgBuildBlock[]; + edges: CfgBuildEdge[]; } -function buildVisitorCfgMap(tree, cfgRules, symbols, langId) { +function buildVisitorCfgMap( + tree: { rootNode: TreeSitterNode } | undefined, + cfgRules: unknown, + symbols: FileSymbols, + langId: string, +): Map | null { const needsVisitor = tree && symbols.definitions.some( @@ -156,29 +193,36 @@ function buildVisitorCfgMap(tree, cfgRules, symbols, langId) { if (!needsVisitor) return null; const visitor = createCfgVisitor(cfgRules); + const typedRules = cfgRules as { functionNodes: string[] }; const walkerOpts = { - functionNodeTypes: new Set(cfgRules.functionNodes), - nestingNodeTypes: new Set(), - getFunctionName: (node) => { - const nameNode = node.childForFieldName('name'); + functionNodeTypes: new 
Set(typedRules.functionNodes), + nestingNodeTypes: new Set(), + getFunctionName: (node: TreeSitterNode) => { + const nameNode = node.childForFieldName?.('name'); return nameNode ? nameNode.text : null; }, }; - const walkResults = walkWithVisitors(tree.rootNode, [visitor], langId, walkerOpts); - const cfgResults = walkResults.cfg || []; - const visitorCfgByLine = new Map(); + const walkResults = walkWithVisitors(tree!.rootNode, [visitor], langId, walkerOpts); + // biome-ignore lint/complexity/useLiteralKeys: noPropertyAccessFromIndexSignature requires bracket notation + const cfgResults = (walkResults['cfg'] || []) as VisitorCfgResult[]; + const visitorCfgByLine = new Map(); for (const r of cfgResults) { if (r.funcNode) { const line = r.funcNode.startPosition.row + 1; if (!visitorCfgByLine.has(line)) visitorCfgByLine.set(line, []); - visitorCfgByLine.get(line).push(r); + visitorCfgByLine.get(line)!.push(r); } } return visitorCfgByLine; } -function persistCfg(cfg, nodeId, insertBlock, insertEdge) { - const blockDbIds = new Map(); +function persistCfg( + cfg: { blocks: CfgBuildBlock[]; edges: CfgBuildEdge[] }, + nodeId: number, + insertBlock: ReturnType, + insertEdge: ReturnType, +): void { + const blockDbIds = new Map(); for (const block of cfg.blocks) { const result = insertBlock.run( nodeId, @@ -202,15 +246,12 @@ function persistCfg(cfg, nodeId, insertBlock, insertEdge) { // ─── Build-Time: Compute CFG for Changed Files ───────────────────────── -/** - * Build CFG data for all function/method definitions and persist to DB. 
- * - * @param {object} db - open better-sqlite3 database (read-write) - * @param {Map} fileSymbols - Map - * @param {string} rootDir - absolute project root path - * @param {object} [_engineOpts] - engine options (unused; always uses WASM for AST) - */ -export async function buildCFGData(db, fileSymbols, rootDir, _engineOpts) { +export async function buildCFGData( + db: BetterSqlite3Database, + fileSymbols: Map, + rootDir: string, + _engineOpts?: unknown, +): Promise { const extToLang = buildExtToLangMap(); const { parsers, getParserFn } = await initCfgParsers(fileSymbols); @@ -245,9 +286,9 @@ export async function buildCFGData(db, fileSymbols, rootDir, _engineOpts) { const nodeId = getFunctionNodeId(db, def.name, relPath, def.line); if (!nodeId) continue; - let cfg = null; + let cfg: { blocks: CfgBuildBlock[]; edges: CfgBuildEdge[] } | null = null; if (def.cfg?.blocks?.length) { - cfg = def.cfg; + cfg = def.cfg as unknown as { blocks: CfgBuildBlock[]; edges: CfgBuildEdge[] }; } else if (visitorCfgByLine) { const candidates = visitorCfgByLine.get(def.line); const r = !candidates @@ -255,7 +296,7 @@ export async function buildCFGData(db, fileSymbols, rootDir, _engineOpts) { : candidates.length === 1 ? candidates[0] : (candidates.find((c) => { - const n = c.funcNode.childForFieldName('name'); + const n = c.funcNode.childForFieldName?.('name'); return n && n.text === def.name; }) ?? candidates[0]); if (r) cfg = { blocks: r.blocks, edges: r.edges }; @@ -281,15 +322,52 @@ export async function buildCFGData(db, fileSymbols, rootDir, _engineOpts) { const CFG_DEFAULT_KINDS = ['function', 'method']; -/** - * Load CFG data for a function from the database. 
- * - * @param {string} name - Function name (partial match) - * @param {string} [customDbPath] - Path to graph.db - * @param {object} [opts] - Options - * @returns {{ function: object, blocks: object[], edges: object[], summary: object }} - */ -export function cfgData(name, customDbPath, opts = {}) { +interface CfgQueryBlock { + index: number; + type: string; + startLine: number; + endLine: number; + label: string | null; +} + +interface CfgQueryEdge { + source: number; + sourceType: string; + target: number; + targetType: string; + kind: string; +} + +interface CfgQueryFunctionResult { + name: string; + kind: string; + file: string; + line: number; + blocks: CfgQueryBlock[]; + edges: CfgQueryEdge[]; + summary: { blockCount: number; edgeCount: number }; +} + +interface CfgDataResult { + name: string; + results: CfgQueryFunctionResult[]; + warning?: string; + _pagination?: unknown; +} + +interface CfgOpts { + noTests?: boolean; + file?: string | string[]; + kind?: string; + limit?: number; + offset?: number; +} + +export function cfgData( + name: string, + customDbPath: string | undefined, + opts: CfgOpts = {}, +): CfgDataResult { const db = openReadonlyOrFail(customDbPath); try { const noTests = opts.noTests || false; @@ -313,7 +391,7 @@ export function cfgData(name, customDbPath, opts = {}) { return { name, results: [] }; } - const results = nodes.map((node) => { + const results: CfgQueryFunctionResult[] = nodes.map((node: NodeRow) => { const cfgBlocks = getCfgBlocks(db, node.id); const cfgEdges = getCfgEdges(db, node.id); @@ -351,11 +429,8 @@ export function cfgData(name, customDbPath, opts = {}) { // ─── Export Formats ───────────────────────────────────────────────────── -/** - * Convert CFG data to DOT format for Graphviz rendering. 
- */ -export function cfgToDOT(cfgResult) { - const lines = []; +export function cfgToDOT(cfgResult: CfgDataResult): string { + const lines: string[] = []; for (const r of cfgResult.results) { lines.push(`digraph "${r.name}" {`); @@ -383,11 +458,8 @@ export function cfgToDOT(cfgResult) { return lines.join('\n'); } -/** - * Convert CFG data to Mermaid format. - */ -export function cfgToMermaid(cfgResult) { - const lines = []; +export function cfgToMermaid(cfgResult: CfgDataResult): string { + const lines: string[] = []; for (const r of cfgResult.results) { lines.push(`graph TD`); @@ -415,7 +487,7 @@ export function cfgToMermaid(cfgResult) { return lines.join('\n'); } -function blockLabel(block) { +function blockLabel(block: CfgQueryBlock): string { const loc = block.startLine && block.endLine ? ` L${block.startLine}${block.endLine !== block.startLine ? `-${block.endLine}` : ''}` @@ -424,7 +496,7 @@ function blockLabel(block) { return `${block.type}${label}${loc}`; } -function edgeStyle(kind) { +function edgeStyle(kind: string): string { if (kind === 'exception') return ', color=red, fontcolor=red'; if (kind === 'branch_true') return ', color=green, fontcolor=green'; if (kind === 'branch_false') return ', color=red, fontcolor=red'; diff --git a/src/features/check.js b/src/features/check.ts similarity index 69% rename from src/features/check.js rename to src/features/check.ts index 4b71df2d..0e7c7d55 100644 --- a/src/features/check.js +++ b/src/features/check.ts @@ -6,22 +6,27 @@ import { bfsTransitiveCallers } from '../domain/analysis/impact.js'; import { findCycles } from '../domain/graph/cycles.js'; import { loadConfig } from '../infrastructure/config.js'; import { isTestFile } from '../infrastructure/test-filter.js'; +import type { BetterSqlite3Database, CodegraphConfig } from '../types.js'; import { matchOwners, parseCodeowners } from './owners.js'; // ─── Diff Parser ────────────────────────────────────────────────────── -/** - * Parse unified diff output, 
extracting both new-side (+) and old-side (-) ranges. - * Old-side ranges are needed for signature detection (DB line numbers = pre-change). - * - * @param {string} diffOutput - Raw `git diff --unified=0` output - * @returns {{ changedRanges: Map, oldRanges: Map, newFiles: Set }} - */ -export function parseDiffOutput(diffOutput) { - const changedRanges = new Map(); - const oldRanges = new Map(); - const newFiles = new Set(); - let currentFile = null; +interface DiffRange { + start: number; + end: number; +} + +interface ParsedDiff { + changedRanges: Map; + oldRanges: Map; + newFiles: Set; +} + +export function parseDiffOutput(diffOutput: string): ParsedDiff { + const changedRanges = new Map(); + const oldRanges = new Map(); + const newFiles = new Set(); + let currentFile: string | null = null; let prevIsDevNull = false; for (const line of diffOutput.split('\n')) { @@ -35,7 +40,7 @@ export function parseDiffOutput(diffOutput) { } const fileMatch = line.match(/^\+\+\+ b\/(.+)/); if (fileMatch) { - currentFile = fileMatch[1]; + currentFile = fileMatch[1]!; if (!changedRanges.has(currentFile)) changedRanges.set(currentFile, []); if (!oldRanges.has(currentFile)) oldRanges.set(currentFile, []); if (prevIsDevNull) newFiles.add(currentFile); @@ -44,15 +49,15 @@ export function parseDiffOutput(diffOutput) { } const hunkMatch = line.match(/^@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? 
@@/); if (hunkMatch && currentFile) { - const oldStart = parseInt(hunkMatch[1], 10); + const oldStart = parseInt(hunkMatch[1]!, 10); const oldCount = parseInt(hunkMatch[2] || '1', 10); if (oldCount > 0) { - oldRanges.get(currentFile).push({ start: oldStart, end: oldStart + oldCount - 1 }); + oldRanges.get(currentFile)!.push({ start: oldStart, end: oldStart + oldCount - 1 }); } - const newStart = parseInt(hunkMatch[3], 10); + const newStart = parseInt(hunkMatch[3]!, 10); const newCount = parseInt(hunkMatch[4] || '1', 10); if (newCount > 0) { - changedRanges.get(currentFile).push({ start: newStart, end: newStart + newCount - 1 }); + changedRanges.get(currentFile)!.push({ start: newStart, end: newStart + newCount - 1 }); } } } @@ -61,20 +66,44 @@ export function parseDiffOutput(diffOutput) { // ─── Predicates ─────────────────────────────────────────────────────── -/** - * Predicate 1: Assert no dependency cycles involve changed files. - */ -export function checkNoNewCycles(db, changedFiles, noTests) { +interface CyclesResult { + passed: boolean; + cycles: string[][]; +} + +export function checkNoNewCycles( + db: BetterSqlite3Database, + changedFiles: Set, + noTests: boolean, +): CyclesResult { const cycles = findCycles(db, { fileLevel: true, noTests }); const involved = cycles.filter((cycle) => cycle.some((f) => changedFiles.has(f))); return { passed: involved.length === 0, cycles: involved }; } -/** - * Predicate 2: Assert no function exceeds N transitive callers. 
- */ -export function checkMaxBlastRadius(db, changedRanges, threshold, noTests, maxDepth) { - const violations = []; +interface BlastRadiusViolation { + name: string; + kind: string; + file: string; + line: number; + transitiveCallers: number; +} + +interface BlastRadiusResult { + passed: boolean; + maxFound: number; + threshold: number; + violations: BlastRadiusViolation[]; +} + +export function checkMaxBlastRadius( + db: BetterSqlite3Database, + changedRanges: Map, + threshold: number, + noTests: boolean, + maxDepth: number, +): BlastRadiusResult { + const violations: BlastRadiusViolation[] = []; let maxFound = 0; for (const [file, ranges] of changedRanges) { @@ -83,11 +112,19 @@ export function checkMaxBlastRadius(db, changedRanges, threshold, noTests, maxDe .prepare( `SELECT * FROM nodes WHERE file = ? AND kind IN ('function', 'method', 'class') ORDER BY line`, ) - .all(file); + .all(file) as Array<{ + id: number; + name: string; + kind: string; + file: string; + line: number; + end_line: number | null; + }>; for (let i = 0; i < defs.length; i++) { - const def = defs[i]; - const endLine = def.end_line || (defs[i + 1] ? defs[i + 1].line - 1 : 999999); + const def = defs[i]!; + const nextDef = defs[i + 1]; + const endLine = def.end_line || (nextDef ? nextDef.line - 1 : 999999); let overlaps = false; for (const range of ranges) { if (range.start <= endLine && range.end >= def.line) { @@ -118,12 +155,24 @@ export function checkMaxBlastRadius(db, changedRanges, threshold, noTests, maxDe return { passed: violations.length === 0, maxFound, threshold, violations }; } -/** - * Predicate 3: Assert no function declaration lines were modified. - * Uses old-side hunk ranges (which correspond to DB line numbers from last build). 
- */ -export function checkNoSignatureChanges(db, oldRanges, noTests) { - const violations = []; +interface SignatureViolation { + name: string; + kind: string; + file: string; + line: number; +} + +interface SignatureResult { + passed: boolean; + violations: SignatureViolation[]; +} + +export function checkNoSignatureChanges( + db: BetterSqlite3Database, + oldRanges: Map, + noTests: boolean, +): SignatureResult { + const violations: SignatureViolation[] = []; for (const [file, ranges] of oldRanges) { if (ranges.length === 0) continue; @@ -133,7 +182,7 @@ export function checkNoSignatureChanges(db, oldRanges, noTests) { .prepare( `SELECT name, kind, file, line FROM nodes WHERE file = ? AND kind IN ('function', 'method', 'class') ORDER BY line`, ) - .all(file); + .all(file) as SignatureViolation[]; for (const def of defs) { for (const range of ranges) { @@ -153,10 +202,24 @@ export function checkNoSignatureChanges(db, oldRanges, noTests) { return { passed: violations.length === 0, violations }; } -/** - * Predicate 4: Assert no cross-owner boundary violations among changed files. 
- */ -export function checkNoBoundaryViolations(db, changedFiles, repoRoot, noTests) { +interface BoundaryViolation { + from: string; + to: string; + edgeKind: string; +} + +interface BoundaryResult { + passed: boolean; + violations: BoundaryViolation[]; + note?: string; +} + +export function checkNoBoundaryViolations( + db: BetterSqlite3Database, + changedFiles: Set | string[], + repoRoot: string, + noTests: boolean, +): BoundaryResult { const parsed = parseCodeowners(repoRoot); if (!parsed) { return { passed: true, violations: [], note: 'No CODEOWNERS file found — skipped' }; @@ -172,9 +235,9 @@ export function checkNoBoundaryViolations(db, changedFiles, repoRoot, noTests) { JOIN nodes t ON e.target_id = t.id WHERE e.kind = 'calls'`, ) - .all(); + .all() as Array<{ edgeKind: string; srcFile: string; tgtFile: string }>; - const violations = []; + const violations: BoundaryViolation[] = []; for (const e of edges) { if (noTests && (isTestFile(e.srcFile) || isTestFile(e.tgtFile))) continue; if (!changedSet.has(e.srcFile) && !changedSet.has(e.tgtFile)) continue; @@ -195,22 +258,40 @@ export function checkNoBoundaryViolations(db, changedFiles, repoRoot, noTests) { // ─── Main ───────────────────────────────────────────────────────────── -/** - * Run validation predicates against git changes. 
- * - * @param {string} [customDbPath] - Path to graph.db - * @param {object} opts - * @param {string} [opts.ref] - Git ref to diff against - * @param {boolean} [opts.staged] - Analyze staged changes - * @param {boolean} [opts.cycles] - Enable cycles predicate - * @param {number} [opts.blastRadius] - Blast radius threshold - * @param {boolean} [opts.signatures] - Enable signatures predicate - * @param {boolean} [opts.boundaries] - Enable boundaries predicate - * @param {number} [opts.depth] - Max BFS depth (default: 3) - * @param {boolean} [opts.noTests] - Exclude test files - * @returns {{ predicates: object[], summary: object, passed: boolean }} - */ -export function checkData(customDbPath, opts = {}) { +interface PredicateResult { + name: string; + passed: boolean; + [key: string]: unknown; +} + +interface CheckSummary { + total: number; + passed: number; + failed: number; + changedFiles: number; + newFiles: number; +} + +interface CheckResult { + error?: string; + predicates?: PredicateResult[]; + summary?: CheckSummary; + passed?: boolean; +} + +interface CheckOpts { + ref?: string; + staged?: boolean; + cycles?: boolean; + blastRadius?: number | null; + signatures?: boolean; + boundaries?: boolean; + depth?: number; + noTests?: boolean; + config?: CodegraphConfig; +} + +export function checkData(customDbPath: string | undefined, opts: CheckOpts = {}): CheckResult { const db = openReadonlyOrFail(customDbPath); try { @@ -219,20 +300,14 @@ export function checkData(customDbPath, opts = {}) { const noTests = opts.noTests || false; const maxDepth = opts.depth || 3; - // Load config defaults for check predicates - // NOTE: opts.config is loaded from process.cwd() at startup (via CLI context), - // which may differ from the DB's parent repo root when --db points to an external - // project. This is an acceptable trade-off to avoid duplicate I/O on the hot path. 
const config = opts.config || loadConfig(repoRoot); - const checkConfig = config.check || {}; + const checkConfig = config.check || ({} as CodegraphConfig['check']); - // Resolve which predicates are enabled: CLI flags ?? config ?? built-in defaults const enableCycles = opts.cycles ?? checkConfig.cycles ?? true; const enableSignatures = opts.signatures ?? checkConfig.signatures ?? true; const enableBoundaries = opts.boundaries ?? checkConfig.boundaries ?? true; const blastRadiusThreshold = opts.blastRadius ?? checkConfig.blastRadius ?? null; - // Verify git repo let checkDir = repoRoot; let isGitRepo = false; while (checkDir) { @@ -248,8 +323,7 @@ export function checkData(customDbPath, opts = {}) { return { error: `Not a git repository: ${repoRoot}` }; } - // Run git diff - let diffOutput; + let diffOutput: string; try { const args = opts.staged ? ['diff', '--cached', '--unified=0', '--no-color'] @@ -261,7 +335,7 @@ export function checkData(customDbPath, opts = {}) { stdio: ['pipe', 'pipe', 'pipe'], }); } catch (e) { - return { error: `Failed to run git diff: ${e.message}` }; + return { error: `Failed to run git diff: ${(e as Error).message}` }; } if (!diffOutput.trim()) { @@ -283,8 +357,7 @@ export function checkData(customDbPath, opts = {}) { const changedFiles = new Set(changedRanges.keys()); - // Execute enabled predicates - const predicates = []; + const predicates: PredicateResult[] = []; if (enableCycles) { const result = checkNoNewCycles(db, changedFiles, noTests); diff --git a/src/features/cochange.js b/src/features/cochange.ts similarity index 72% rename from src/features/cochange.js rename to src/features/cochange.ts index 4c531dfe..36b8d80b 100644 --- a/src/features/cochange.js +++ b/src/features/cochange.ts @@ -13,16 +13,31 @@ import { warn } from '../infrastructure/logger.js'; import { isTestFile } from '../infrastructure/test-filter.js'; import { normalizePath } from '../shared/constants.js'; import { paginateResult } from '../shared/paginate.js'; 
+import type { BetterSqlite3Database } from '../types.js'; -/** - * Scan git history and return parsed commit data. - * @param {string} repoRoot - Absolute path to the git repo root - * @param {object} [opts] - * @param {string} [opts.since] - Git date expression (e.g. "1 year ago") - * @param {string} [opts.afterSha] - Only include commits after this SHA - * @returns {{ commits: Array<{sha: string, epoch: number, files: string[]}> }} - */ -export function scanGitHistory(repoRoot, opts = {}) { +interface CommitEntry { + sha: string; + epoch: number; + files: string[]; +} + +interface CoChangePair { + commitCount: number; + jaccard: number; + lastEpoch: number; +} + +interface CoChangeMeta { + analyzedAt: string | null; + since: string | null; + minSupport: number | null; + lastCommit: string | null; +} + +export function scanGitHistory( + repoRoot: string, + opts: { since?: string; afterSha?: string | null } = {}, +): { commits: CommitEntry[] } { const args = [ 'log', '--name-only', @@ -34,7 +49,7 @@ export function scanGitHistory(repoRoot, opts = {}) { if (opts.afterSha) args.push(`${opts.afterSha}..HEAD`); args.push('--', '.'); - let output; + let output: string; try { output = execFileSync('git', args, { cwd: repoRoot, @@ -42,21 +57,21 @@ export function scanGitHistory(repoRoot, opts = {}) { maxBuffer: 50 * 1024 * 1024, stdio: ['pipe', 'pipe', 'pipe'], }); - } catch (e) { - warn(`Failed to scan git history: ${e.message}`); + } catch (e: unknown) { + warn(`Failed to scan git history: ${(e as Error).message}`); return { commits: [] }; } if (!output.trim()) return { commits: [] }; - const commits = []; + const commits: CommitEntry[] = []; // Split on double newlines to get blocks; each block is sha\nepoch\nfile1\nfile2... 
const blocks = output.trim().split(/\n\n+/); for (const block of blocks) { const lines = block.split('\n').filter((l) => l.length > 0); if (lines.length < 2) continue; - const sha = lines[0]; - const epoch = parseInt(lines[1], 10); + const sha = lines[0]!; + const epoch = parseInt(lines[1]!, 10); if (Number.isNaN(epoch)) continue; const files = lines.slice(2).map((f) => normalizePath(f)); if (files.length > 0) { @@ -67,23 +82,17 @@ export function scanGitHistory(repoRoot, opts = {}) { return { commits }; } -/** - * Compute co-change pairs from parsed commit data. - * @param {Array<{sha: string, epoch: number, files: string[]}>} commits - * @param {object} [opts] - * @param {number} [opts.minSupport=3] - Minimum number of co-occurrences - * @param {number} [opts.maxFilesPerCommit=50] - Skip commits with too many files - * @param {Set} [opts.knownFiles] - If provided, only include pairs where both files are in this set - * @returns {Map} - */ -export function computeCoChanges(commits, opts = {}) { +export function computeCoChanges( + commits: CommitEntry[], + opts: { minSupport?: number; maxFilesPerCommit?: number; knownFiles?: Set | null } = {}, +): { pairs: Map; fileCommitCounts: Map } { const minSupport = opts.minSupport ?? 3; const maxFilesPerCommit = opts.maxFilesPerCommit ?? 
50; const knownFiles = opts.knownFiles || null; - const fileCommitCounts = new Map(); - const pairCounts = new Map(); - const pairLastEpoch = new Map(); + const fileCommitCounts = new Map(); + const pairCounts = new Map(); + const pairLastEpoch = new Map(); for (const commit of commits) { let { files } = commit; @@ -111,10 +120,10 @@ export function computeCoChanges(commits, opts = {}) { } // Filter by minSupport and compute Jaccard - const results = new Map(); + const results = new Map(); for (const [key, count] of pairCounts) { if (count < minSupport) continue; - const [fileA, fileB] = key.split('\0'); + const [fileA, fileB] = key.split('\0') as [string, string]; const countA = fileCommitCounts.get(fileA) || 0; const countB = fileCommitCounts.get(fileB) || 0; const jaccard = count / (countA + countB - count); @@ -128,17 +137,17 @@ export function computeCoChanges(commits, opts = {}) { return { pairs: results, fileCommitCounts }; } -/** - * Analyze git history and populate co-change data in the database. 
- * @param {string} [customDbPath] - Path to graph.db - * @param {object} [opts] - * @param {string} [opts.since] - Git date expression - * @param {number} [opts.minSupport] - Minimum co-occurrence count - * @param {number} [opts.maxFilesPerCommit] - Max files per commit - * @param {boolean} [opts.full] - Force full re-scan - * @returns {{ pairsFound: number, commitsScanned: number, since: string, minSupport: number }} - */ -export function analyzeCoChanges(customDbPath, opts = {}) { +export function analyzeCoChanges( + customDbPath?: string, + opts: { + since?: string; + minSupport?: number; + maxFilesPerCommit?: number; + full?: boolean; + } = {}, +): + | { pairsFound: number; commitsScanned: number; since: string; minSupport: number } + | { error: string } { const dbPath = findDbPath(customDbPath); const db = openDb(dbPath); initSchema(db); @@ -155,11 +164,13 @@ export function analyzeCoChanges(customDbPath, opts = {}) { const maxFilesPerCommit = opts.maxFilesPerCommit ?? 50; // Check for incremental state - let afterSha = null; + let afterSha: string | null = null; if (!opts.full) { try { const row = db - .prepare("SELECT value FROM co_change_meta WHERE key = 'last_analyzed_commit'") + .prepare<{ value: string }>( + "SELECT value FROM co_change_meta WHERE key = 'last_analyzed_commit'", + ) .get(); if (row) afterSha = row.value; } catch { @@ -175,9 +186,9 @@ export function analyzeCoChanges(customDbPath, opts = {}) { } // Collect known files from the graph for filtering - let knownFiles = null; + let knownFiles: Set | null = null; try { - const rows = db.prepare('SELECT DISTINCT file FROM nodes').all(); + const rows = db.prepare<{ file: string }>('SELECT DISTINCT file FROM nodes').all(); knownFiles = new Set(rows.map((r) => r.file)); } catch { /* nodes table may not exist */ @@ -210,7 +221,7 @@ export function analyzeCoChanges(customDbPath, opts = {}) { fileCountUpsert.run(file, count); } for (const [key, data] of coChanges) { - const [fileA, fileB] = 
key.split('\0'); + const [fileA, fileB] = key.split('\0') as [string, string]; pairUpsert.run(fileA, fileB, data.commitCount, data.lastEpoch); } }); @@ -238,13 +249,15 @@ export function analyzeCoChanges(customDbPath, opts = {}) { ON CONFLICT(key) DO UPDATE SET value = excluded.value `); if (commits.length > 0) { - metaUpsert.run('last_analyzed_commit', commits[0].sha); + metaUpsert.run('last_analyzed_commit', commits[0]!.sha); } metaUpsert.run('analyzed_at', new Date().toISOString()); metaUpsert.run('since', since); metaUpsert.run('min_support', String(minSupport)); - const totalPairs = db.prepare('SELECT COUNT(*) as cnt FROM co_changes').get().cnt; + const totalPairs = db + .prepare<{ cnt: number }>('SELECT COUNT(*) as cnt FROM co_changes') + .get()!.cnt; closeDb(db); @@ -256,17 +269,19 @@ export function analyzeCoChanges(customDbPath, opts = {}) { }; } -/** - * Query co-change partners for a specific file. - * @param {string} file - File path (partial match supported) - * @param {string} [customDbPath] - * @param {object} [opts] - * @param {number} [opts.limit=20] - * @param {number} [opts.minJaccard=0.3] - * @param {boolean} [opts.noTests] - * @returns {{ file: string, partners: Array, meta: object }} - */ -export function coChangeData(file, customDbPath, opts = {}) { +interface CoChangeRow { + file_a: string; + file_b: string; + commit_count: number; + jaccard: number; + last_commit_epoch: number; +} + +export function coChangeData( + file: string, + customDbPath?: string, + opts: { limit?: number; minJaccard?: number; noTests?: boolean; offset?: number } = {}, +): Record { const db = openReadonlyOrFail(customDbPath); const limit = opts.limit || 20; const minJaccard = opts.minJaccard ?? 0.3; @@ -288,7 +303,7 @@ export function coChangeData(file, customDbPath, opts = {}) { } const rows = db - .prepare( + .prepare( `SELECT file_a, file_b, commit_count, jaccard, last_commit_epoch FROM co_changes WHERE (file_a = ? OR file_b = ?) AND jaccard >= ? 
@@ -296,7 +311,12 @@ export function coChangeData(file, customDbPath, opts = {}) { ) .all(resolvedFile, resolvedFile, minJaccard); - const partners = []; + const partners: Array<{ + file: string; + commitCount: number; + jaccard: number; + lastCommitDate: string | null; + }> = []; for (const row of rows) { const partner = row.file_a === resolvedFile ? row.file_b : row.file_a; if (noTests && isTestFile(partner)) continue; @@ -318,16 +338,10 @@ export function coChangeData(file, customDbPath, opts = {}) { return paginateResult(base, 'partners', { limit: opts.limit, offset: opts.offset }); } -/** - * Query top global co-change pairs. - * @param {string} [customDbPath] - * @param {object} [opts] - * @param {number} [opts.limit=20] - * @param {number} [opts.minJaccard=0.3] - * @param {boolean} [opts.noTests] - * @returns {{ pairs: Array, meta: object }} - */ -export function coChangeTopData(customDbPath, opts = {}) { +export function coChangeTopData( + customDbPath?: string, + opts: { limit?: number; minJaccard?: number; noTests?: boolean; offset?: number } = {}, +): Record { const db = openReadonlyOrFail(customDbPath); const limit = opts.limit || 20; const minJaccard = opts.minJaccard ?? 0.3; @@ -341,7 +355,7 @@ export function coChangeTopData(customDbPath, opts = {}) { } const rows = db - .prepare( + .prepare( `SELECT file_a, file_b, commit_count, jaccard, last_commit_epoch FROM co_changes WHERE jaccard >= ? 
@@ -349,7 +363,13 @@ export function coChangeTopData(customDbPath, opts = {}) { ) .all(minJaccard); - const pairs = []; + const pairs: Array<{ + fileA: string; + fileB: string; + commitCount: number; + jaccard: number; + lastCommitDate: string | null; + }> = []; for (const row of rows) { if (noTests && (isTestFile(row.file_a) || isTestFile(row.file_b))) continue; pairs.push({ @@ -371,18 +391,11 @@ export function coChangeTopData(customDbPath, opts = {}) { return paginateResult(base, 'pairs', { limit: opts.limit, offset: opts.offset }); } -/** - * Batch-query co-change partners for a set of files. - * Takes an already-open readonly DB handle (for diff-impact integration). - * @param {string[]} files - File paths to query - * @param {import('better-sqlite3').Database} db - Already-open DB handle - * @param {object} [opts] - * @param {number} [opts.minJaccard=0.3] - * @param {number} [opts.limit=20] - * @param {boolean} [opts.noTests] - * @returns {Array<{file: string, coupledWith: string, commitCount: number, jaccard: number}>} - */ -export function coChangeForFiles(files, db, opts = {}) { +export function coChangeForFiles( + files: string[], + db: BetterSqlite3Database, + opts: { minJaccard?: number; limit?: number; noTests?: boolean } = {}, +): Array<{ file: string; coupledWith: string; commitCount: number; jaccard: number }> { const minJaccard = opts.minJaccard ?? 0.3; const limit = opts.limit ?? 
20; const noTests = opts.noTests || false; @@ -392,7 +405,7 @@ export function coChangeForFiles(files, db, opts = {}) { const placeholders = files.map(() => '?').join(','); const rows = db - .prepare( + .prepare<{ file_a: string; file_b: string; commit_count: number; jaccard: number }>( `SELECT file_a, file_b, commit_count, jaccard FROM co_changes WHERE (file_a IN (${placeholders}) OR file_b IN (${placeholders})) @@ -402,7 +415,12 @@ export function coChangeForFiles(files, db, opts = {}) { ) .all(...files, ...files, minJaccard, limit); - const results = []; + const results: Array<{ + file: string; + coupledWith: string; + commitCount: number; + jaccard: number; + }> = []; for (const row of rows) { const partner = inputSet.has(row.file_a) ? row.file_b : row.file_a; const source = inputSet.has(row.file_a) ? row.file_a : row.file_b; @@ -421,10 +439,10 @@ export function coChangeForFiles(files, db, opts = {}) { // ─── Internal Helpers ──────────────────────────────────────────────────── -function resolveCoChangeFile(db, file) { +function resolveCoChangeFile(db: BetterSqlite3Database, file: string): string | null { // Exact match first const exact = db - .prepare( + .prepare<{ file_a: string }>( 'SELECT file_a FROM co_changes WHERE file_a = ? UNION SELECT file_b FROM co_changes WHERE file_b = ? LIMIT 1', ) .get(file, file); @@ -432,7 +450,7 @@ function resolveCoChangeFile(db, file) { // Partial match (ends with) const partial = db - .prepare( + .prepare<{ file: string }>( `SELECT file_a AS file FROM co_changes WHERE file_a LIKE ? UNION SELECT file_b AS file FROM co_changes WHERE file_b LIKE ? 
@@ -444,18 +462,20 @@ function resolveCoChangeFile(db, file) { return null; } -function getCoChangeMeta(db) { +function getCoChangeMeta(db: BetterSqlite3Database): CoChangeMeta | null { try { - const rows = db.prepare('SELECT key, value FROM co_change_meta').all(); - const meta = {}; + const rows = db + .prepare<{ key: string; value: string }>('SELECT key, value FROM co_change_meta') + .all(); + const meta: Record = {}; for (const row of rows) { meta[row.key] = row.value; } return { - analyzedAt: meta.analyzed_at || null, - since: meta.since || null, - minSupport: meta.min_support ? parseInt(meta.min_support, 10) : null, - lastCommit: meta.last_analyzed_commit || null, + analyzedAt: meta['analyzed_at'] || null, + since: meta['since'] || null, + minSupport: meta['min_support'] ? parseInt(meta['min_support'], 10) : null, + lastCommit: meta['last_analyzed_commit'] || null, }; } catch { return null; diff --git a/src/features/communities.js b/src/features/communities.ts similarity index 65% rename from src/features/communities.js rename to src/features/communities.ts index 23725b05..f448e176 100644 --- a/src/features/communities.js +++ b/src/features/communities.ts @@ -2,49 +2,60 @@ import path from 'node:path'; import { openRepo } from '../db/index.js'; import { louvainCommunities } from '../graph/algorithms/louvain.js'; import { buildDependencyGraph } from '../graph/builders/dependency.js'; +import type { CodeGraph } from '../graph/model.js'; import { loadConfig } from '../infrastructure/config.js'; import { paginateResult } from '../shared/paginate.js'; +import type { CodegraphConfig, Repository } from '../types.js'; // ─── Directory Helpers ──────────────────────────────────────────────── -function getDirectory(filePath) { +function getDirectory(filePath: string): string { const dir = path.dirname(filePath); return dir === '.' ? 
'(root)' : dir; } // ─── Community Building ────────────────────────────────────────────── -/** - * Group graph nodes by Louvain community assignment and build structured objects. - * @param {object} graph - The dependency graph - * @param {Map} assignments - Node key → community ID - * @param {object} opts - * @param {boolean} [opts.drift] - If true, omit member lists - * @returns {{ communities: object[], communityDirs: Map> }} - */ -function buildCommunityObjects(graph, assignments, opts) { - const communityMap = new Map(); +interface CommunityMember { + name: string; + file: string; + kind?: string; +} + +interface CommunityObject { + id: number; + size: number; + directories: Record; + members?: CommunityMember[]; +} + +function buildCommunityObjects( + graph: CodeGraph, + assignments: Map, + opts: { drift?: boolean }, +): { communities: CommunityObject[]; communityDirs: Map> } { + const communityMap = new Map(); for (const [key] of graph.nodes()) { const cid = assignments.get(key); if (cid == null) continue; if (!communityMap.has(cid)) communityMap.set(cid, []); - communityMap.get(cid).push(key); + communityMap.get(cid)!.push(key); } - const communities = []; - const communityDirs = new Map(); + const communities: CommunityObject[] = []; + const communityDirs = new Map>(); for (const [cid, members] of communityMap) { - const dirCounts = {}; - const memberData = []; + const dirCounts: Record = {}; + const memberData: CommunityMember[] = []; for (const key of members) { - const attrs = graph.getNodeAttrs(key); - const dir = getDirectory(attrs.file); + const attrs = graph.getNodeAttrs(key)!; + const dir = getDirectory(attrs['file'] as string); dirCounts[dir] = (dirCounts[dir] || 0) + 1; memberData.push({ - name: attrs.label, - file: attrs.file, - ...(attrs.kind ? { kind: attrs.kind } : {}), + name: attrs['label'] as string, + file: attrs['file'] as string, + ...(attrs['kind'] ? 
{ kind: attrs['kind'] as string } : {}), }); } @@ -64,22 +75,30 @@ function buildCommunityObjects(graph, assignments, opts) { // ─── Drift Analysis ────────────────────────────────────────────────── -/** - * Compute split/merge candidates and drift score from community directory data. - * @param {object[]} communities - Community objects with `directories` - * @param {Map>} communityDirs - Community ID → directory set - * @returns {{ splitCandidates: object[], mergeCandidates: object[], driftScore: number }} - */ -function analyzeDrift(communities, communityDirs) { - const dirToCommunities = new Map(); +interface DriftResult { + splitCandidates: Array<{ directory: string; communityCount: number }>; + mergeCandidates: Array<{ + communityId: number; + size: number; + directoryCount: number; + directories: string[]; + }>; + driftScore: number; +} + +function analyzeDrift( + communities: CommunityObject[], + communityDirs: Map>, +): DriftResult { + const dirToCommunities = new Map>(); for (const [cid, dirs] of communityDirs) { for (const dir of dirs) { if (!dirToCommunities.has(dir)) dirToCommunities.set(dir, new Set()); - dirToCommunities.get(dir).add(cid); + dirToCommunities.get(dir)!.add(cid); } } - const splitCandidates = []; + const splitCandidates: DriftResult['splitCandidates'] = []; for (const [dir, cids] of dirToCommunities) { if (cids.size >= 2) { splitCandidates.push({ directory: dir, communityCount: cids.size }); @@ -87,7 +106,7 @@ function analyzeDrift(communities, communityDirs) { } splitCandidates.sort((a, b) => b.communityCount - a.communityCount); - const mergeCandidates = []; + const mergeCandidates: DriftResult['mergeCandidates'] = []; for (const c of communities) { const dirCount = Object.keys(c.directories).length; if (dirCount >= 2) { @@ -112,21 +131,25 @@ function analyzeDrift(communities, communityDirs) { // ─── Core Analysis ──────────────────────────────────────────────────── -/** - * Run Louvain community detection and return structured data. 
- * - * @param {string} [customDbPath] - Path to graph.db - * @param {object} [opts] - * @param {boolean} [opts.functions] - Function-level instead of file-level - * @param {number} [opts.resolution] - Louvain resolution (default 1.0) - * @param {boolean} [opts.noTests] - Exclude test files - * @param {boolean} [opts.drift] - Drift-only mode (omit community member lists) - * @param {boolean} [opts.json] - JSON output (used by CLI wrapper only) - * @returns {{ communities: object[], modularity: number, drift: object, summary: object }} - */ -export function communitiesData(customDbPath, opts = {}) { +export function communitiesData( + customDbPath?: string, + opts: { + functions?: boolean; + resolution?: number; + noTests?: boolean; + drift?: boolean; + json?: boolean; + config?: CodegraphConfig; + maxLevels?: number; + maxLocalPasses?: number; + refinementTheta?: number; + limit?: number; + offset?: number; + repo?: Repository; + } = {}, +): Record { const { repo, close } = openRepo(customDbPath, opts); - let graph; + let graph: CodeGraph; try { graph = buildDependencyGraph(repo, { fileLevel: !opts.functions, @@ -154,7 +177,6 @@ export function communitiesData(customDbPath, opts = {}) { resolution, maxLevels, maxLocalPasses, - refinementTheta, }); const { communities, communityDirs } = buildCommunityObjects(graph, assignments, opts); @@ -174,15 +196,12 @@ export function communitiesData(customDbPath, opts = {}) { return paginateResult(base, 'communities', { limit: opts.limit, offset: opts.offset }); } -/** - * Lightweight summary for stats integration. 
- * - * @param {string} [customDbPath] - * @param {object} [opts] - * @param {boolean} [opts.noTests] - * @returns {{ communityCount: number, modularity: number, driftScore: number }} - */ -export function communitySummaryForStats(customDbPath, opts = {}) { - const data = communitiesData(customDbPath, { ...opts, drift: true }); +export function communitySummaryForStats( + customDbPath?: string, + opts: { noTests?: boolean; repo?: Repository } = {}, +): { communityCount: number; modularity: number; driftScore: number } { + const data = communitiesData(customDbPath, { ...opts, drift: true }) as { + summary: { communityCount: number; modularity: number; driftScore: number }; + }; return data.summary; } diff --git a/src/features/complexity.js b/src/features/complexity.ts similarity index 71% rename from src/features/complexity.js rename to src/features/complexity.ts index a4a805ec..a581be4a 100644 --- a/src/features/complexity.js +++ b/src/features/complexity.ts @@ -18,6 +18,14 @@ import { loadConfig } from '../infrastructure/config.js'; import { debug, info } from '../infrastructure/logger.js'; import { isTestFile } from '../infrastructure/test-filter.js'; import { paginateResult } from '../shared/paginate.js'; +import type { + BetterSqlite3Database, + CodegraphConfig, + ComplexityRules, + HalsteadRules, + LOCMetrics, + TreeSitterNode, +} from '../types.js'; // Re-export rules for backward compatibility export { COMPLEXITY_RULES, HALSTEAD_RULES }; @@ -27,36 +35,45 @@ const COMPLEXITY_EXTENSIONS = buildExtensionSet(COMPLEXITY_RULES); // ─── Halstead Metrics Computation ───────────────────────────────────────── -/** - * Compute Halstead metrics for a function's AST subtree. 
- * - * @param {object} functionNode - tree-sitter node for the function - * @param {string} language - Language ID - * @returns {{ n1: number, n2: number, bigN1: number, bigN2: number, vocabulary: number, length: number, volume: number, difficulty: number, effort: number, bugs: number } | null} - */ -export function computeHalsteadMetrics(functionNode, language) { - const rules = HALSTEAD_RULES.get(language); +interface HalsteadMetrics { + n1: number; + n2: number; + bigN1: number; + bigN2: number; + vocabulary: number; + length: number; + volume: number; + difficulty: number; + effort: number; + bugs: number; +} + +export function computeHalsteadMetrics( + functionNode: TreeSitterNode, + language: string, +): HalsteadMetrics | null { + const rules = HALSTEAD_RULES.get(language) as HalsteadRules | undefined; if (!rules) return null; - const operators = new Map(); // type -> count - const operands = new Map(); // text -> count + const operators = new Map(); // type -> count + const operands = new Map(); // text -> count - function walk(node) { + function walk(node: TreeSitterNode | null): void { if (!node) return; // Skip type annotation subtrees - if (rules.skipTypes.has(node.type)) return; + if (rules!.skipTypes.has(node.type)) return; // Compound operators (non-leaf): count the node type as an operator - if (rules.compoundOperators.has(node.type)) { + if (rules!.compoundOperators.has(node.type)) { operators.set(node.type, (operators.get(node.type) || 0) + 1); } // Leaf nodes: classify as operator or operand if (node.childCount === 0) { - if (rules.operatorLeafTypes.has(node.type)) { + if (rules!.operatorLeafTypes.has(node.type)) { operators.set(node.type, (operators.get(node.type) || 0) + 1); - } else if (rules.operandLeafTypes.has(node.type)) { + } else if (rules!.operandLeafTypes.has(node.type)) { const text = node.text; operands.set(text, (operands.get(text) || 0) + 1); } @@ -109,23 +126,18 @@ export const computeMaintainabilityIndex = 
_computeMaintainabilityIndex; // ─── Algorithm: Single-Traversal DFS ────────────────────────────────────── -/** - * Compute cognitive complexity, cyclomatic complexity, and max nesting depth - * for a function's AST subtree in a single DFS walk. - * - * @param {object} functionNode - tree-sitter node for the function body - * @param {string} language - Language ID (e.g. 'javascript', 'typescript') - * @returns {{ cognitive: number, cyclomatic: number, maxNesting: number } | null} - */ -export function computeFunctionComplexity(functionNode, language) { - const rules = COMPLEXITY_RULES.get(language); +export function computeFunctionComplexity( + functionNode: TreeSitterNode, + language: string, +): { cognitive: number; cyclomatic: number; maxNesting: number } | null { + const rules = COMPLEXITY_RULES.get(language) as ComplexityRules | undefined; if (!rules) return null; let cognitive = 0; let cyclomatic = 1; // McCabe starts at 1 let maxNesting = 0; - function walk(node, nestingLevel, isTopFunction) { + function walk(node: TreeSitterNode | null, nestingLevel: number, isTopFunction: boolean): void { if (!node) return; const type = node.type; @@ -134,9 +146,9 @@ export function computeFunctionComplexity(functionNode, language) { if (nestingLevel > maxNesting) maxNesting = nestingLevel; // Handle logical operators in binary expressions - if (type === rules.logicalNodeType) { + if (type === rules!.logicalNodeType) { const op = node.child(1)?.type; - if (op && rules.logicalOperators.has(op)) { + if (op && rules!.logicalOperators.has(op)) { // Cyclomatic: +1 for every logical operator cyclomatic++; @@ -144,7 +156,7 @@ export function computeFunctionComplexity(functionNode, language) { // Walk up to check if parent is same type with same operator const parent = node.parent; let sameSequence = false; - if (parent && parent.type === rules.logicalNodeType) { + if (parent && parent.type === rules!.logicalNodeType) { const parentOp = parent.child(1)?.type; if (parentOp === op) 
{ sameSequence = true; @@ -163,16 +175,16 @@ export function computeFunctionComplexity(functionNode, language) { } // Handle optional chaining (cyclomatic only) - if (type === rules.optionalChainType) { + if (type === rules!.optionalChainType) { cyclomatic++; } // Handle branch/control flow nodes (skip keyword leaf tokens like Ruby's `if`) - if (rules.branchNodes.has(type) && node.childCount > 0) { + if (rules!.branchNodes.has(type) && node.childCount > 0) { // Pattern A: else clause wraps if (JS/C#/Rust) - if (rules.elseNodeType && type === rules.elseNodeType) { + if (rules!.elseNodeType && type === rules!.elseNodeType) { const firstChild = node.namedChild(0); - if (firstChild && firstChild.type === rules.ifNodeType) { + if (firstChild && firstChild.type === rules!.ifNodeType) { // else-if: the if_statement child handles its own increment for (let i = 0; i < node.childCount; i++) { walk(node.child(i), nestingLevel, false); @@ -188,7 +200,7 @@ export function computeFunctionComplexity(functionNode, language) { } // Pattern B: explicit elif node (Python/Ruby/PHP) - if (rules.elifNodeType && type === rules.elifNodeType) { + if (rules!.elifNodeType && type === rules!.elifNodeType) { cognitive++; cyclomatic++; for (let i = 0; i < node.childCount; i++) { @@ -199,15 +211,15 @@ export function computeFunctionComplexity(functionNode, language) { // Detect else-if via Pattern A or C let isElseIf = false; - if (type === rules.ifNodeType) { - if (rules.elseViaAlternative) { + if (type === rules!.ifNodeType) { + if (rules!.elseViaAlternative) { // Pattern C (Go/Java): if_statement is the alternative of parent if_statement isElseIf = - node.parent?.type === rules.ifNodeType && + node.parent?.type === rules!.ifNodeType && node.parent.childForFieldName('alternative')?.id === node.id; - } else if (rules.elseNodeType) { + } else if (rules!.elseNodeType) { // Pattern A (JS/C#/Rust): if_statement inside else_clause - isElseIf = node.parent?.type === rules.elseNodeType; + isElseIf = 
node.parent?.type === rules!.elseNodeType; } } @@ -225,11 +237,11 @@ export function computeFunctionComplexity(functionNode, language) { cyclomatic++; // Switch-like nodes don't add cyclomatic themselves (cases do) - if (rules.switchLikeNodes?.has(type)) { + if (rules!.switchLikeNodes?.has(type)) { cyclomatic--; // Undo the ++ above; cases handle cyclomatic } - if (rules.nestingNodes.has(type)) { + if (rules!.nestingNodes.has(type)) { for (let i = 0; i < node.childCount; i++) { walk(node.child(i), nestingLevel + 1, false); } @@ -239,9 +251,9 @@ export function computeFunctionComplexity(functionNode, language) { // Pattern C plain else: block that is the alternative of an if_statement (Go/Java) if ( - rules.elseViaAlternative && - type !== rules.ifNodeType && - node.parent?.type === rules.ifNodeType && + rules!.elseViaAlternative && + type !== rules!.ifNodeType && + node.parent?.type === rules!.ifNodeType && node.parent.childForFieldName('alternative')?.id === node.id ) { cognitive++; @@ -252,12 +264,12 @@ export function computeFunctionComplexity(functionNode, language) { } // Handle case nodes (cyclomatic only, skip keyword leaves) - if (rules.caseNodes.has(type) && node.childCount > 0) { + if (rules!.caseNodes.has(type) && node.childCount > 0) { cyclomatic++; } // Handle nested function definitions (increase nesting) - if (!isTopFunction && rules.functionNodes.has(type)) { + if (!isTopFunction && rules!.functionNodes.has(type)) { for (let i = 0; i < node.childCount; i++) { walk(node.child(i), nestingLevel + 1, false); } @@ -277,26 +289,26 @@ export function computeFunctionComplexity(functionNode, language) { // ─── Merged Single-Pass Computation ─────────────────────────────────────── -/** - * Compute all metrics (complexity + Halstead + LOC + MI) in a single DFS walk. - * Merges computeFunctionComplexity and computeHalsteadMetrics into one tree - * traversal, avoiding two separate DFS walks per function node at build time. 
- * LOC is text-based (not tree-based) and computed separately (very cheap). - * - * Now delegates to the complexity visitor via the unified walker. - * - * @param {object} functionNode - tree-sitter node for the function - * @param {string} langId - Language ID (e.g. 'javascript', 'python') - * @returns {{ cognitive: number, cyclomatic: number, maxNesting: number, halstead: object|null, loc: object, mi: number } | null} - */ -export function computeAllMetrics(functionNode, langId) { +interface AllMetricsResult { + cognitive: number; + cyclomatic: number; + maxNesting: number; + halstead: HalsteadMetrics | null; + loc: LOCMetrics; + mi: number; +} + +export function computeAllMetrics( + functionNode: TreeSitterNode, + langId: string, +): AllMetricsResult | null { const cRules = COMPLEXITY_RULES.get(langId); if (!cRules) return null; const hRules = HALSTEAD_RULES.get(langId); const visitor = createComplexityVisitor(cRules, hRules, { langId }); - const nestingNodes = new Set(cRules.nestingNodes); + const nestingNodes = new Set((cRules as ComplexityRules).nestingNodes); // NOTE: do NOT add functionNodes here — in function-level mode the walker // walks a single function node, and adding it to nestingNodeTypes would // inflate context.nestingLevel by +1 for the entire body. @@ -305,7 +317,12 @@ export function computeAllMetrics(functionNode, langId) { nestingNodeTypes: nestingNodes, }); - const rawResult = results.complexity; + const rawResult = results['complexity'] as { + cognitive: number; + cyclomatic: number; + maxNesting: number; + halstead: HalsteadMetrics | null; + }; // The visitor's finish() in function-level mode returns the raw metrics // but without LOC (needs the functionNode text). Compute LOC + MI here. @@ -326,12 +343,30 @@ export function computeAllMetrics(functionNode, langId) { // ─── Build-Time: Compute Metrics for Changed Files ──────────────────────── -/** - * Find the function body node in a parse tree that matches a given line range. 
- */ export { _findFunctionNode as findFunctionNode }; -async function initWasmParsersIfNeeded(fileSymbols) { +interface FileSymbols { + _tree?: { rootNode: TreeSitterNode } | null; + _langId?: string | null; + definitions: Array<{ + name: string; + kind: string; + line: number; + endLine?: number; + complexity?: { + cognitive: number; + cyclomatic: number; + maxNesting?: number; + maintainabilityIndex?: number; + halstead?: HalsteadMetrics | null; + loc?: LOCMetrics | null; + }; + }>; +} + +async function initWasmParsersIfNeeded( + fileSymbols: Map, +): Promise<{ parsers: unknown; extToLang: Map | null }> { for (const [relPath, symbols] of fileSymbols) { if (!symbols._tree) { const ext = path.extname(relPath).toLowerCase(); @@ -350,7 +385,15 @@ async function initWasmParsersIfNeeded(fileSymbols) { return { parsers: null, extToLang: null }; } -function getTreeForFile(symbols, relPath, rootDir, parsers, extToLang, getParser) { +function getTreeForFile( + symbols: FileSymbols, + relPath: string, + rootDir: string, + parsers: unknown, + extToLang: Map | null, + // biome-ignore lint/suspicious/noExplicitAny: dynamic import from parser.js + getParser: (parsers: any, absPath: string) => any, +): { tree: { rootNode: TreeSitterNode }; langId: string } | null { let tree = symbols._tree; let langId = symbols._langId; @@ -366,11 +409,11 @@ function getTreeForFile(symbols, relPath, rootDir, parsers, extToLang, getParser if (!langId) return null; const absPath = path.join(rootDir, relPath); - let code; + let code: string; try { code = fs.readFileSync(absPath, 'utf-8'); - } catch (e) { - debug(`complexity: cannot read ${relPath}: ${e.message}`); + } catch (e: unknown) { + debug(`complexity: cannot read ${relPath}: ${(e as Error).message}`); return null; } @@ -379,25 +422,30 @@ function getTreeForFile(symbols, relPath, rootDir, parsers, extToLang, getParser try { tree = parser.parse(code); - } catch (e) { - debug(`complexity: parse failed for ${relPath}: ${e.message}`); + } catch 
(e: unknown) { + debug(`complexity: parse failed for ${relPath}: ${(e as Error).message}`); return null; } } - return { tree, langId }; + return tree && langId ? { tree: tree as { rootNode: TreeSitterNode }, langId } : null; } -function upsertPrecomputedComplexity(db, upsert, def, relPath) { +function upsertPrecomputedComplexity( + db: BetterSqlite3Database, + upsert: { run(...params: unknown[]): unknown }, + def: FileSymbols['definitions'][0], + relPath: string, +): number { const nodeId = getFunctionNodeId(db, def.name, relPath, def.line); if (!nodeId) return 0; - const ch = def.complexity.halstead; - const cl = def.complexity.loc; + const ch = def.complexity!.halstead; + const cl = def.complexity!.loc; upsert.run( nodeId, - def.complexity.cognitive, - def.complexity.cyclomatic, - def.complexity.maxNesting ?? 0, + def.complexity!.cognitive, + def.complexity!.cyclomatic, + def.complexity!.maxNesting ?? 0, cl ? cl.loc : 0, cl ? cl.sloc : 0, cl ? cl.commentLines : 0, @@ -411,18 +459,26 @@ function upsertPrecomputedComplexity(db, upsert, def, relPath) { ch ? ch.difficulty : 0, ch ? ch.effort : 0, ch ? ch.bugs : 0, - def.complexity.maintainabilityIndex ?? 0, + def.complexity!.maintainabilityIndex ?? 0, ); return 1; } -function upsertAstComplexity(db, upsert, def, relPath, tree, langId, rules) { +function upsertAstComplexity( + db: BetterSqlite3Database, + upsert: { run(...params: unknown[]): unknown }, + def: FileSymbols['definitions'][0], + relPath: string, + tree: { rootNode: TreeSitterNode } | null, + langId: string | null | undefined, + rules: ComplexityRules | undefined, +): number { if (!tree || !rules) return 0; - const funcNode = _findFunctionNode(tree.rootNode, def.line, def.endLine, rules); + const funcNode = _findFunctionNode(tree.rootNode, def.line, def.endLine ?? 
def.line, rules); if (!funcNode) return 0; - const metrics = computeAllMetrics(funcNode, langId); + const metrics = computeAllMetrics(funcNode, langId!); if (!metrics) return 0; const nodeId = getFunctionNodeId(db, def.name, relPath, def.line); @@ -452,16 +508,12 @@ function upsertAstComplexity(db, upsert, def, relPath, tree, langId, rules) { return 1; } -/** - * Re-parse changed files with WASM tree-sitter, find function AST subtrees, - * compute complexity, and upsert into function_complexity table. - * - * @param {object} db - open better-sqlite3 database (read-write) - * @param {Map} fileSymbols - Map - * @param {string} rootDir - absolute project root path - * @param {object} [engineOpts] - engine options (unused; always uses WASM for AST) - */ -export async function buildComplexityMetrics(db, fileSymbols, rootDir, _engineOpts) { +export async function buildComplexityMetrics( + db: BetterSqlite3Database, + fileSymbols: Map, + rootDir: string, + _engineOpts?: unknown, +): Promise { const { parsers, extToLang } = await initWasmParsersIfNeeded(fileSymbols); const { getParser } = await import('../domain/parser.js'); @@ -483,7 +535,9 @@ export async function buildComplexityMetrics(db, fileSymbols, rootDir, _engineOp const tree = result ? result.tree : null; const langId = result ? result.langId : null; - const rules = langId ? COMPLEXITY_RULES.get(langId) : null; + const rules = langId + ? (COMPLEXITY_RULES.get(langId) as ComplexityRules | undefined) + : undefined; for (const def of symbols.definitions) { if (def.kind !== 'function' && def.kind !== 'method') continue; @@ -507,21 +561,38 @@ export async function buildComplexityMetrics(db, fileSymbols, rootDir, _engineOp // ─── Query-Time Functions ───────────────────────────────────────────────── -/** - * Return structured complexity data for querying. 
- * - * @param {string} [customDbPath] - Path to graph.db - * @param {object} [opts] - Options - * @param {string} [opts.target] - Function name filter (partial match) - * @param {number} [opts.limit] - Max results (default: 20) - * @param {string} [opts.sort] - Sort by: cognitive | cyclomatic | nesting (default: cognitive) - * @param {boolean} [opts.aboveThreshold] - Only functions above warn thresholds - * @param {string} [opts.file] - Filter by file (partial match) - * @param {string} [opts.kind] - Filter by symbol kind - * @param {boolean} [opts.noTests] - Exclude test files - * @returns {{ functions: object[], summary: object, thresholds: object }} - */ -export function complexityData(customDbPath, opts = {}) { +interface ComplexityRow { + name: string; + kind: string; + file: string; + line: number; + end_line: number | null; + cognitive: number; + cyclomatic: number; + max_nesting: number; + loc: number; + sloc: number; + maintainability_index: number; + halstead_volume: number; + halstead_difficulty: number; + halstead_effort: number; + halstead_bugs: number; +} + +export function complexityData( + customDbPath?: string, + opts: { + target?: string; + limit?: number; + sort?: string; + aboveThreshold?: boolean; + file?: string; + kind?: string; + noTests?: boolean; + config?: CodegraphConfig; + offset?: number; + } = {}, +): Record { const db = openReadonlyOrFail(customDbPath); try { const sort = opts.sort || 'cognitive'; @@ -533,7 +604,8 @@ export function complexityData(customDbPath, opts = {}) { // Load thresholds from config const config = opts.config || loadConfig(process.cwd()); - const thresholds = config.manifesto?.rules || { + // biome-ignore lint/suspicious/noExplicitAny: thresholds come from config with dynamic keys + const thresholds: any = config.manifesto?.rules || { cognitive: { warn: 15, fail: null }, cyclomatic: { warn: 10, fail: null }, maxNesting: { warn: 4, fail: null }, @@ -542,7 +614,7 @@ export function complexityData(customDbPath, 
opts = {}) { // Build query let where = "WHERE n.kind IN ('function','method')"; - const params = []; + const params: unknown[] = []; if (noTests) { where += ` AND n.file NOT LIKE '%.test.%' @@ -556,7 +628,7 @@ export function complexityData(customDbPath, opts = {}) { params.push(`%${target}%`); } { - const fc = buildFileConditionSQL(fileFilter, 'n.file'); + const fc = buildFileConditionSQL(fileFilter as string, 'n.file'); where += fc.sql; params.push(...fc.params); } @@ -565,11 +637,12 @@ export function complexityData(customDbPath, opts = {}) { params.push(kindFilter); } - const isValidThreshold = (v) => typeof v === 'number' && Number.isFinite(v); + const isValidThreshold = (v: unknown): v is number => + typeof v === 'number' && Number.isFinite(v); let having = ''; if (aboveThreshold) { - const conditions = []; + const conditions: string[] = []; if (isValidThreshold(thresholds.cognitive?.warn)) { conditions.push(`fc.cognitive >= ${thresholds.cognitive.warn}`); } @@ -589,7 +662,7 @@ export function complexityData(customDbPath, opts = {}) { } } - const orderMap = { + const orderMap: Record = { cognitive: 'fc.cognitive DESC', cyclomatic: 'fc.cyclomatic DESC', nesting: 'fc.max_nesting DESC', @@ -601,10 +674,10 @@ export function complexityData(customDbPath, opts = {}) { }; const orderBy = orderMap[sort] || 'fc.cognitive DESC'; - let rows; + let rows: ComplexityRow[]; try { rows = db - .prepare( + .prepare( `SELECT n.name, n.kind, n.file, n.line, n.end_line, fc.cognitive, fc.cyclomatic, fc.max_nesting, fc.loc, fc.sloc, fc.maintainability_index, @@ -615,14 +688,14 @@ export function complexityData(customDbPath, opts = {}) { ORDER BY ${orderBy}`, ) .all(...params); - } catch (e) { - debug(`complexity query failed (table may not exist): ${e.message}`); + } catch (e: unknown) { + debug(`complexity query failed (table may not exist): ${(e as Error).message}`); // Check if graph has nodes even though complexity table is missing/empty let hasGraph = false; try { - hasGraph 
= db.prepare('SELECT COUNT(*) as c FROM nodes').get().c > 0; - } catch (e2) { - debug(`nodes table check failed: ${e2.message}`); + hasGraph = db.prepare<{ c: number }>('SELECT COUNT(*) as c FROM nodes').get()!.c > 0; + } catch (e2: unknown) { + debug(`nodes table check failed: ${(e2 as Error).message}`); } return { functions: [], summary: null, thresholds, hasGraph }; } @@ -631,23 +704,23 @@ export function complexityData(customDbPath, opts = {}) { const filtered = noTests ? rows.filter((r) => !isTestFile(r.file)) : rows; const functions = filtered.map((r) => { - const exceeds = []; - if (isValidThreshold(thresholds.cognitive?.warn) && r.cognitive >= thresholds.cognitive.warn) + const exceeds: string[] = []; + if (isValidThreshold(thresholds.cognitive?.warn) && r.cognitive >= thresholds.cognitive.warn!) exceeds.push('cognitive'); if ( isValidThreshold(thresholds.cyclomatic?.warn) && - r.cyclomatic >= thresholds.cyclomatic.warn + r.cyclomatic >= thresholds.cyclomatic.warn! ) exceeds.push('cyclomatic'); if ( isValidThreshold(thresholds.maxNesting?.warn) && - r.max_nesting >= thresholds.maxNesting.warn + r.max_nesting >= thresholds.maxNesting.warn! ) exceeds.push('maxNesting'); if ( isValidThreshold(thresholds.maintainabilityIndex?.warn) && r.maintainability_index > 0 && - r.maintainability_index <= thresholds.maintainabilityIndex.warn + r.maintainability_index <= thresholds.maintainabilityIndex.warn! 
) exceeds.push('maintainabilityIndex'); @@ -674,10 +747,15 @@ export function complexityData(customDbPath, opts = {}) { }); // Summary stats - let summary = null; + let summary: Record | null = null; try { const allRows = db - .prepare( + .prepare<{ + cognitive: number; + cyclomatic: number; + max_nesting: number; + maintainability_index: number; + }>( `SELECT fc.cognitive, fc.cyclomatic, fc.max_nesting, fc.maintainability_index FROM function_complexity fc JOIN nodes n ON fc.node_id = n.id WHERE n.kind IN ('function','method') @@ -700,28 +778,28 @@ export function complexityData(customDbPath, opts = {}) { aboveWarn: allRows.filter( (r) => (isValidThreshold(thresholds.cognitive?.warn) && - r.cognitive >= thresholds.cognitive.warn) || + r.cognitive >= thresholds.cognitive.warn!) || (isValidThreshold(thresholds.cyclomatic?.warn) && - r.cyclomatic >= thresholds.cyclomatic.warn) || + r.cyclomatic >= thresholds.cyclomatic.warn!) || (isValidThreshold(thresholds.maxNesting?.warn) && - r.max_nesting >= thresholds.maxNesting.warn) || + r.max_nesting >= thresholds.maxNesting.warn!) 
|| (isValidThreshold(thresholds.maintainabilityIndex?.warn) && r.maintainability_index > 0 && - r.maintainability_index <= thresholds.maintainabilityIndex.warn), + r.maintainability_index <= thresholds.maintainabilityIndex.warn!), ).length, }; } - } catch (e) { - debug(`complexity summary query failed: ${e.message}`); + } catch (e: unknown) { + debug(`complexity summary query failed: ${(e as Error).message}`); } // When summary is null (no complexity rows), check if graph has nodes let hasGraph = false; if (summary === null) { try { - hasGraph = db.prepare('SELECT COUNT(*) as c FROM nodes').get().c > 0; - } catch (e) { - debug(`nodes table check failed: ${e.message}`); + hasGraph = db.prepare<{ c: number }>('SELECT COUNT(*) as c FROM nodes').get()!.c > 0; + } catch (e: unknown) { + debug(`nodes table check failed: ${(e as Error).message}`); } } @@ -732,25 +810,47 @@ export function complexityData(customDbPath, opts = {}) { } } -/** - * Generator: stream complexity rows one-by-one using .iterate() for memory efficiency. 
- * @param {string} [customDbPath] - * @param {object} [opts] - * @param {boolean} [opts.noTests] - * @param {string} [opts.file] - * @param {string} [opts.target] - * @param {string} [opts.kind] - * @param {string} [opts.sort] - * @yields {{ name: string, kind: string, file: string, line: number, cognitive: number, cyclomatic: number, maxNesting: number, loc: number, sloc: number }} - */ -export function* iterComplexity(customDbPath, opts = {}) { +interface IterComplexityRow { + name: string; + kind: string; + file: string; + line: number; + end_line: number | null; + cognitive: number; + cyclomatic: number; + max_nesting: number; + loc: number; + sloc: number; +} + +export function* iterComplexity( + customDbPath?: string, + opts: { + noTests?: boolean; + file?: string; + target?: string; + kind?: string; + sort?: string; + } = {}, +): Generator<{ + name: string; + kind: string; + file: string; + line: number; + endLine: number | null; + cognitive: number; + cyclomatic: number; + maxNesting: number; + loc: number; + sloc: number; +}> { const db = openReadonlyOrFail(customDbPath); try { const noTests = opts.noTests || false; const sort = opts.sort || 'cognitive'; let where = "WHERE n.kind IN ('function','method')"; - const params = []; + const params: unknown[] = []; if (noTests) { where += ` AND n.file NOT LIKE '%.test.%' @@ -764,7 +864,7 @@ export function* iterComplexity(customDbPath, opts = {}) { params.push(`%${opts.target}%`); } { - const fc = buildFileConditionSQL(opts.file, 'n.file'); + const fc = buildFileConditionSQL(opts.file as string, 'n.file'); where += fc.sql; params.push(...fc.params); } @@ -773,7 +873,7 @@ export function* iterComplexity(customDbPath, opts = {}) { params.push(opts.kind); } - const orderMap = { + const orderMap: Record = { cognitive: 'fc.cognitive DESC', cyclomatic: 'fc.cyclomatic DESC', nesting: 'fc.max_nesting DESC', @@ -785,7 +885,7 @@ export function* iterComplexity(customDbPath, opts = {}) { }; const orderBy = orderMap[sort] 
|| 'fc.cognitive DESC'; - const stmt = db.prepare( + const stmt = db.prepare( `SELECT n.name, n.kind, n.file, n.line, n.end_line, fc.cognitive, fc.cyclomatic, fc.max_nesting, fc.loc, fc.sloc FROM function_complexity fc diff --git a/src/features/dataflow.js b/src/features/dataflow.ts similarity index 64% rename from src/features/dataflow.js rename to src/features/dataflow.ts index 2dee25b6..c1831674 100644 --- a/src/features/dataflow.js +++ b/src/features/dataflow.ts @@ -24,6 +24,7 @@ import { ALL_SYMBOL_KINDS, normalizeSymbol } from '../domain/queries.js'; import { debug, info } from '../infrastructure/logger.js'; import { isTestFile } from '../infrastructure/test-filter.js'; import { paginateResult } from '../shared/paginate.js'; +import type { BetterSqlite3Database, NodeRow, TreeSitterNode } from '../types.js'; import { findNodes } from './shared/find-nodes.js'; // Re-export for backward compatibility @@ -35,32 +36,45 @@ export const DATAFLOW_EXTENSIONS = buildExtensionSet(DATAFLOW_RULES); // ── extractDataflow ────────────────────────────────────────────────────────── -/** - * Extract dataflow information from a parsed AST. - * Delegates to the dataflow visitor via the unified walker. 
- * - * @param {object} tree - tree-sitter parse tree - * @param {string} filePath - relative file path - * @param {object[]} definitions - symbol definitions from the parser - * @param {string} [langId='javascript'] - language identifier for rules lookup - * @returns {{ parameters, returns, assignments, argFlows, mutations }} - */ -export function extractDataflow(tree, _filePath, _definitions, langId = 'javascript') { +interface DataflowResult { + parameters: unknown[]; + returns: unknown[]; + assignments: unknown[]; + argFlows: unknown[]; + mutations: unknown[]; +} + +export function extractDataflow( + tree: { rootNode: TreeSitterNode }, + _filePath: string, + _definitions: unknown[], + langId = 'javascript', +): DataflowResult { const rules = DATAFLOW_RULES.get(langId); if (!rules) return { parameters: [], returns: [], assignments: [], argFlows: [], mutations: [] }; const visitor = createDataflowVisitor(rules); const results = walkWithVisitors(tree.rootNode, [visitor], langId, { - functionNodeTypes: rules.functionNodes, + functionNodeTypes: (rules as { functionNodes: Set }).functionNodes, getFunctionName: () => null, // dataflow visitor handles its own name extraction }); - return results.dataflow; + return results['dataflow'] as DataflowResult; } // ── Build-Time Helpers ────────────────────────────────────────────────────── -async function initDataflowParsers(fileSymbols) { +interface FileSymbolsDataflow { + _tree?: { rootNode: TreeSitterNode } | null; + _langId?: string | null; + definitions: Array<{ name: string; kind: string; line: number }>; + dataflow?: DataflowResult | null; +} + +async function initDataflowParsers( + fileSymbols: Map, + // biome-ignore lint/suspicious/noExplicitAny: dynamic import from parser.js +): Promise<{ parsers: unknown; getParserFn: ((parsers: any, absPath: string) => any) | null }> { let needsFallback = false; for (const [relPath, symbols] of fileSymbols) { @@ -73,8 +87,9 @@ async function initDataflowParsers(fileSymbols) { } } 
- let parsers = null; - let getParserFn = null; + let parsers: unknown = null; + // biome-ignore lint/suspicious/noExplicitAny: dynamic import from parser.js + let getParserFn: ((parsers: any, absPath: string) => any) | null = null; if (needsFallback) { const { createParsers } = await import('../domain/parser.js'); @@ -86,7 +101,15 @@ async function initDataflowParsers(fileSymbols) { return { parsers, getParserFn }; } -function getDataflowForFile(symbols, relPath, rootDir, extToLang, parsers, getParserFn) { +function getDataflowForFile( + symbols: FileSymbolsDataflow, + relPath: string, + rootDir: string, + extToLang: Map, + parsers: unknown, + // biome-ignore lint/suspicious/noExplicitAny: dynamic import from parser.js + getParserFn: ((parsers: any, absPath: string) => any) | null, +): DataflowResult | null { if (symbols.dataflow) return symbols.dataflow; let tree = symbols._tree; @@ -99,11 +122,11 @@ function getDataflowForFile(symbols, relPath, rootDir, extToLang, parsers, getPa if (!langId || !DATAFLOW_RULES.has(langId)) return null; const absPath = path.join(rootDir, relPath); - let code; + let code: string; try { code = fs.readFileSync(absPath, 'utf-8'); - } catch (e) { - debug(`dataflow: cannot read ${relPath}: ${e.message}`); + } catch (e: unknown) { + debug(`dataflow: cannot read ${relPath}: ${(e as Error).message}`); return null; } @@ -112,8 +135,8 @@ function getDataflowForFile(symbols, relPath, rootDir, extToLang, parsers, getPa try { tree = parser.parse(code); - } catch (e) { - debug(`dataflow: parse failed for ${relPath}: ${e.message}`); + } catch (e: unknown) { + debug(`dataflow: parse failed for ${relPath}: ${(e as Error).message}`); return null; } } @@ -126,13 +149,45 @@ function getDataflowForFile(symbols, relPath, rootDir, extToLang, parsers, getPa if (!DATAFLOW_RULES.has(langId)) return null; - return extractDataflow(tree, relPath, symbols.definitions, langId); + return extractDataflow( + tree as { rootNode: TreeSitterNode }, + relPath, + 
symbols.definitions, + langId, + ); +} + +interface ArgFlow { + callerFunc: string; + calleeName: string; + argIndex: number; + expression: string; + line: number; + confidence: number; } -function insertDataflowEdges(insert, data, resolveNode) { +interface Assignment { + sourceCallName: string; + callerFunc: string; + expression: string; + line: number; +} + +interface Mutation { + funcName: string; + binding?: { type: string }; + mutatingExpr: string; + line: number; +} + +function insertDataflowEdges( + insert: { run(...params: unknown[]): unknown }, + data: DataflowResult, + resolveNode: (name: string) => { id: number } | null, +): number { let edgeCount = 0; - for (const flow of data.argFlows) { + for (const flow of data.argFlows as ArgFlow[]) { const sourceNode = resolveNode(flow.callerFunc); const targetNode = resolveNode(flow.calleeName); if (sourceNode && targetNode) { @@ -149,7 +204,7 @@ function insertDataflowEdges(insert, data, resolveNode) { } } - for (const assignment of data.assignments) { + for (const assignment of data.assignments as Assignment[]) { const producerNode = resolveNode(assignment.sourceCallName); const consumerNode = resolveNode(assignment.callerFunc); if (producerNode && consumerNode) { @@ -166,7 +221,7 @@ function insertDataflowEdges(insert, data, resolveNode) { } } - for (const mut of data.mutations) { + for (const mut of data.mutations as Mutation[]) { const mutatorNode = resolveNode(mut.funcName); if (mutatorNode && mut.binding?.type === 'param') { insert.run(mutatorNode.id, mutatorNode.id, 'mutates', null, mut.mutatingExpr, mut.line, 1.0); @@ -179,16 +234,12 @@ function insertDataflowEdges(insert, data, resolveNode) { // ── buildDataflowEdges ────────────────────────────────────────────────────── -/** - * Build dataflow edges and insert them into the database. - * Called during graph build when --dataflow is enabled. 
- * - * @param {object} db - better-sqlite3 database instance - * @param {Map} fileSymbols - map of relPath → symbols - * @param {string} rootDir - absolute root directory - * @param {object} engineOpts - engine options - */ -export async function buildDataflowEdges(db, fileSymbols, rootDir, _engineOpts) { +export async function buildDataflowEdges( + db: BetterSqlite3Database, + fileSymbols: Map, + rootDir: string, + _engineOpts?: unknown, +): Promise { const extToLang = buildExtToLangMap(); const { parsers, getParserFn } = await initDataflowParsers(fileSymbols); @@ -197,12 +248,24 @@ export async function buildDataflowEdges(db, fileSymbols, rootDir, _engineOpts) VALUES (?, ?, ?, ?, ?, ?, ?)`, ); - const getNodeByNameAndFile = db.prepare( + const getNodeByNameAndFile = db.prepare<{ + id: number; + name: string; + kind: string; + file: string; + line: number; + }>( `SELECT id, name, kind, file, line FROM nodes WHERE name = ? AND file = ? AND kind IN ('function', 'method')`, ); - const getNodeByName = db.prepare( + const getNodeByName = db.prepare<{ + id: number; + name: string; + kind: string; + file: string; + line: number; + }>( `SELECT id, name, kind, file, line FROM nodes WHERE name = ? AND kind IN ('function', 'method') ORDER BY file, line LIMIT 10`, @@ -218,11 +281,11 @@ export async function buildDataflowEdges(db, fileSymbols, rootDir, _engineOpts) const data = getDataflowForFile(symbols, relPath, rootDir, extToLang, parsers, getParserFn); if (!data) continue; - const resolveNode = (funcName) => { + const resolveNode = (funcName: string): { id: number } | null => { const local = getNodeByNameAndFile.all(funcName, relPath); - if (local.length > 0) return local[0]; + if (local.length > 0) return local[0]!; const global = getNodeByName.all(funcName); - return global.length > 0 ? global[0] : null; + return global.length > 0 ? global[0]! 
: null; }; totalEdges += insertDataflowEdges(insert, data, resolveNode); @@ -237,15 +300,11 @@ export async function buildDataflowEdges(db, fileSymbols, rootDir, _engineOpts) // findNodes imported from ./shared/find-nodes.js -/** - * Return all dataflow edges for a symbol. - * - * @param {string} name - symbol name (partial match) - * @param {string} [customDbPath] - path to graph.db - * @param {object} [opts] - { noTests, file, kind, limit, offset } - * @returns {{ name, results: object[] }} - */ -export function dataflowData(name, customDbPath, opts = {}) { +export function dataflowData( + name: string, + customDbPath?: string, + opts: { noTests?: boolean; file?: string; kind?: string; limit?: number; offset?: number } = {}, +): Record { const db = openReadonlyOrFail(customDbPath); try { const noTests = opts.noTests || false; @@ -263,7 +322,7 @@ export function dataflowData(name, customDbPath, opts = {}) { db, name, { noTests, file: opts.file, kind: opts.kind }, - ALL_SYMBOL_KINDS, + ALL_SYMBOL_KINDS as unknown as string[], ); if (nodes.length === 0) { return { name, results: [] }; @@ -300,60 +359,79 @@ export function dataflowData(name, customDbPath, opts = {}) { WHERE d.target_id = ? 
AND d.kind = 'mutates'`, ); - const hc = new Map(); - const results = nodes.map((node) => { + const hc = new Map(); + const results = nodes.map((node: NodeRow) => { const sym = normalizeSymbol(node, db, hc); - const flowsTo = flowsToOut.all(node.id).map((r) => ({ - target: r.target_name, - kind: r.target_kind, - file: r.target_file, - line: r.line, - paramIndex: r.param_index, - expression: r.expression, - confidence: r.confidence, - })); - - const flowsFrom = flowsToIn.all(node.id).map((r) => ({ - source: r.source_name, - kind: r.source_kind, - file: r.source_file, - line: r.line, - paramIndex: r.param_index, - expression: r.expression, - confidence: r.confidence, - })); - - const returnConsumers = returnsOut.all(node.id).map((r) => ({ - consumer: r.target_name, - kind: r.target_kind, - file: r.target_file, - line: r.line, - expression: r.expression, - })); - - const returnedBy = returnsIn.all(node.id).map((r) => ({ - producer: r.source_name, - kind: r.source_kind, - file: r.source_file, - line: r.line, - expression: r.expression, - })); - - const mutatesTargets = mutatesOut.all(node.id).map((r) => ({ - target: r.target_name, - expression: r.expression, - line: r.line, - })); - - const mutatedBy = mutatesIn.all(node.id).map((r) => ({ - source: r.source_name, - expression: r.expression, - line: r.line, - })); + const flowsTo = flowsToOut.all(node.id).map( + // biome-ignore lint/suspicious/noExplicitAny: raw DB row + (r: any) => ({ + target: r.target_name, + kind: r.target_kind, + file: r.target_file, + line: r.line, + paramIndex: r.param_index, + expression: r.expression, + confidence: r.confidence, + }), + ); + + const flowsFrom = flowsToIn.all(node.id).map( + // biome-ignore lint/suspicious/noExplicitAny: raw DB row + (r: any) => ({ + source: r.source_name, + kind: r.source_kind, + file: r.source_file, + line: r.line, + paramIndex: r.param_index, + expression: r.expression, + confidence: r.confidence, + }), + ); + + const returnConsumers = 
returnsOut.all(node.id).map( + // biome-ignore lint/suspicious/noExplicitAny: raw DB row + (r: any) => ({ + consumer: r.target_name, + kind: r.target_kind, + file: r.target_file, + line: r.line, + expression: r.expression, + }), + ); + + const returnedBy = returnsIn.all(node.id).map( + // biome-ignore lint/suspicious/noExplicitAny: raw DB row + (r: any) => ({ + producer: r.source_name, + kind: r.source_kind, + file: r.source_file, + line: r.line, + expression: r.expression, + }), + ); + + const mutatesTargets = mutatesOut.all(node.id).map( + // biome-ignore lint/suspicious/noExplicitAny: raw DB row + (r: any) => ({ + target: r.target_name, + expression: r.expression, + line: r.line, + }), + ); + + const mutatedBy = mutatesIn.all(node.id).map( + // biome-ignore lint/suspicious/noExplicitAny: raw DB row + (r: any) => ({ + source: r.source_name, + expression: r.expression, + line: r.line, + }), + ); if (noTests) { - const filter = (arr) => arr.filter((r) => !isTestFile(r.file)); + // biome-ignore lint/suspicious/noExplicitAny: raw DB row results + const filter = (arr: any[]) => arr.filter((r: any) => !isTestFile(r.file)); return { ...sym, flowsTo: filter(flowsTo), @@ -383,16 +461,20 @@ export function dataflowData(name, customDbPath, opts = {}) { } } -/** - * BFS through flows_to + returns edges to find how data gets from A to B. - * - * @param {string} from - source symbol name - * @param {string} to - target symbol name - * @param {string} [customDbPath] - * @param {object} [opts] - { noTests, maxDepth, limit, offset } - * @returns {{ from, to, found, hops?, path? 
}} - */ -export function dataflowPathData(from, to, customDbPath, opts = {}) { +export function dataflowPathData( + from: string, + to: string, + customDbPath?: string, + opts: { + noTests?: boolean; + maxDepth?: number; + fromFile?: string; + toFile?: string; + kind?: string; + limit?: number; + offset?: number; + } = {}, +): Record { const db = openReadonlyOrFail(customDbPath); try { const noTests = opts.noTests || false; @@ -412,7 +494,7 @@ export function dataflowPathData(from, to, customDbPath, opts = {}) { db, from, { noTests, file: opts.fromFile, kind: opts.kind }, - ALL_SYMBOL_KINDS, + ALL_SYMBOL_KINDS as unknown as string[], ); if (fromNodes.length === 0) { return { from, to, found: false, error: `No symbol matching "${from}"` }; @@ -422,17 +504,17 @@ export function dataflowPathData(from, to, customDbPath, opts = {}) { db, to, { noTests, file: opts.toFile, kind: opts.kind }, - ALL_SYMBOL_KINDS, + ALL_SYMBOL_KINDS as unknown as string[], ); if (toNodes.length === 0) { return { from, to, found: false, error: `No symbol matching "${to}"` }; } - const sourceNode = fromNodes[0]; - const targetNode = toNodes[0]; + const sourceNode = fromNodes[0] as NodeRow; + const targetNode = toNodes[0] as NodeRow; if (sourceNode.id === targetNode.id) { - const hc = new Map(); + const hc = new Map(); const sym = normalizeSymbol(sourceNode, db, hc); return { from, @@ -450,15 +532,23 @@ export function dataflowPathData(from, to, customDbPath, opts = {}) { WHERE d.source_id = ? 
AND d.kind IN ('flows_to', 'returns')`, ); - const visited = new Set([sourceNode.id]); - const parent = new Map(); + const visited = new Set([sourceNode.id]); + const parent = new Map(); let queue = [sourceNode.id]; let found = false; for (let depth = 1; depth <= maxDepth; depth++) { - const nextQueue = []; + const nextQueue: number[] = []; for (const currentId of queue) { - const neighbors = neighborStmt.all(currentId); + const neighbors = neighborStmt.all(currentId) as Array<{ + id: number; + name: string; + kind: string; + file: string; + line: number; + edge_kind: string; + expression: string; + }>; for (const n of neighbors) { if (noTests && isTestFile(n.file)) continue; if (n.id === targetNode.id) { @@ -494,11 +584,11 @@ export function dataflowPathData(from, to, customDbPath, opts = {}) { // Reconstruct path const nodeById = db.prepare('SELECT * FROM nodes WHERE id = ?'); - const hc = new Map(); - const pathItems = []; - let cur = targetNode.id; + const hc = new Map(); + const pathItems: Array> = []; + let cur: number | undefined = targetNode.id; while (cur !== undefined) { - const nodeRow = nodeById.get(cur); + const nodeRow = nodeById.get(cur) as NodeRow; const parentInfo = parent.get(cur); pathItems.unshift({ ...normalizeSymbol(nodeRow, db, hc), @@ -507,7 +597,7 @@ export function dataflowPathData(from, to, customDbPath, opts = {}) { }); cur = parentInfo?.parentId; if (cur === sourceNode.id) { - const srcRow = nodeById.get(cur); + const srcRow = nodeById.get(cur) as NodeRow; pathItems.unshift({ ...normalizeSymbol(srcRow, db, hc), edgeKind: null, @@ -523,15 +613,18 @@ export function dataflowPathData(from, to, customDbPath, opts = {}) { } } -/** - * Forward BFS through returns edges: "if I change this function's return value, what breaks?" 
- * - * @param {string} name - symbol name - * @param {string} [customDbPath] - * @param {object} [opts] - { noTests, depth, file, kind, limit, offset } - * @returns {{ name, results: object[] }} - */ -export function dataflowImpactData(name, customDbPath, opts = {}) { +export function dataflowImpactData( + name: string, + customDbPath?: string, + opts: { + noTests?: boolean; + depth?: number; + file?: string; + kind?: string; + limit?: number; + offset?: number; + } = {}, +): Record { const db = openReadonlyOrFail(customDbPath); try { const maxDepth = opts.depth || 5; @@ -550,7 +643,7 @@ export function dataflowImpactData(name, customDbPath, opts = {}) { db, name, { noTests, file: opts.file, kind: opts.kind }, - ALL_SYMBOL_KINDS, + ALL_SYMBOL_KINDS as unknown as string[], ); if (nodes.length === 0) { return { name, results: [] }; @@ -563,23 +656,23 @@ export function dataflowImpactData(name, customDbPath, opts = {}) { WHERE d.source_id = ? AND d.kind = 'returns'`, ); - const hc = new Map(); - const results = nodes.map((node) => { + const hc = new Map(); + const results = nodes.map((node: NodeRow) => { const sym = normalizeSymbol(node, db, hc); - const visited = new Set([node.id]); - const levels = {}; + const visited = new Set([node.id]); + const levels: Record = {}; let frontier = [node.id]; for (let d = 1; d <= maxDepth; d++) { - const nextFrontier = []; + const nextFrontier: number[] = []; for (const fid of frontier) { - const consumers = consumersStmt.all(fid); + const consumers = consumersStmt.all(fid) as NodeRow[]; for (const c of consumers) { if (!visited.has(c.id) && (!noTests || !isTestFile(c.file))) { visited.add(c.id); nextFrontier.push(c.id); if (!levels[d]) levels[d] = []; - levels[d].push(normalizeSymbol(c, db, hc)); + levels[d]!.push(normalizeSymbol(c, db, hc)); } } } diff --git a/src/features/flow.js b/src/features/flow.ts similarity index 73% rename from src/features/flow.js rename to src/features/flow.ts index e91e00b8..9b538905 100644 --- 
a/src/features/flow.js +++ b/src/features/flow.ts @@ -9,32 +9,30 @@ import { openReadonlyOrFail } from '../db/index.js'; import { CORE_SYMBOL_KINDS, findMatchingNodes } from '../domain/queries.js'; import { isTestFile } from '../infrastructure/test-filter.js'; import { paginateResult } from '../shared/paginate.js'; +import type { BetterSqlite3Database } from '../types.js'; import { FRAMEWORK_ENTRY_PREFIXES } from './structure.js'; -/** - * Determine the entry point type from a node name based on framework prefixes. - * @param {string} name - * @returns {'route'|'event'|'command'|'exported'|null} - */ -export function entryPointType(name) { +export function entryPointType(name: string): 'route' | 'event' | 'command' | 'exported' | null { for (const prefix of FRAMEWORK_ENTRY_PREFIXES) { if (name.startsWith(prefix)) { - return prefix.slice(0, -1); // 'route:', 'event:', 'command:' → 'route', 'event', 'command' + return prefix.slice(0, -1) as 'route' | 'event' | 'command'; // 'route:', 'event:', 'command:' → 'route', 'event', 'command' } } return null; } -/** - * Query all entry points from the graph, grouped by type. - * Entry points are nodes with framework prefixes or role = 'entry'. 
- * - * @param {string} [dbPath] - * @param {object} [opts] - * @param {boolean} [opts.noTests] - * @returns {{ entries: object[], byType: object, count: number }} - */ -export function listEntryPointsData(dbPath, opts = {}) { +interface EntryPointRow { + name: string; + kind: string; + file: string; + line: number; + role: string | null; +} + +export function listEntryPointsData( + dbPath?: string, + opts: { noTests?: boolean; limit?: number; offset?: number } = {}, +): Record { const db = openReadonlyOrFail(dbPath); try { const noTests = opts.noTests || false; @@ -44,7 +42,7 @@ export function listEntryPointsData(dbPath, opts = {}) { const prefixParams = FRAMEWORK_ENTRY_PREFIXES.map((p) => `${p}%`); let rows = db - .prepare( + .prepare( `SELECT n.name, n.kind, n.file, n.line, n.role FROM nodes n WHERE ( @@ -67,7 +65,7 @@ export function listEntryPointsData(dbPath, opts = {}) { type: entryPointType(r.name) || (r.role === 'entry' ? 'exported' : null), })); - const byType = {}; + const byType: Record = {}; for (const e of entries) { const t = e.type || 'other'; if (!byType[t]) byType[t] = []; @@ -81,24 +79,44 @@ export function listEntryPointsData(dbPath, opts = {}) { } } -/** - * Forward BFS from a matched node through callees to leaves. 
- * - * @param {string} name - Node name to trace from (supports partial/prefix-stripped matching) - * @param {string} [dbPath] - * @param {object} [opts] - * @param {number} [opts.depth=10] - * @param {boolean} [opts.noTests] - * @param {string} [opts.file] - * @param {string} [opts.kind] - * @returns {{ entry: object|null, depth: number, steps: object[], leaves: object[], cycles: object[], totalReached: number, truncated: boolean }} - */ -export function flowData(name, dbPath, opts = {}) { +interface CalleeRow { + id: number; + name: string; + kind: string; + file: string; + line: number; + role: string | null; +} + +interface NodeInfo { + name: string; + kind: string; + file: string; + line: number; + role?: string | null; + type?: string; +} + +export function flowData( + name: string, + dbPath?: string, + opts: { + depth?: number; + noTests?: boolean; + file?: string; + kind?: string; + limit?: number; + offset?: number; + } = {}, +): Record { const db = openReadonlyOrFail(dbPath); try { const maxDepth = opts.depth || 10; const noTests = opts.noTests || false; - const flowOpts = { ...opts, kinds: opts.kind ? [opts.kind] : CORE_SYMBOL_KINDS }; + const flowOpts = { + ...opts, + kinds: opts.kind ? [opts.kind] : (CORE_SYMBOL_KINDS as unknown as string[]), + }; // Phase 1: Direct LIKE match on full name (use all 10 core symbol kinds, // not just FUNCTION_KINDS, so flow can trace from interfaces/types/structs/etc.) 
@@ -125,7 +143,7 @@ export function flowData(name, dbPath, opts = {}) { } const epType = entryPointType(matchNode.name); - const entry = { + const entry: NodeInfo = { name: matchNode.name, kind: matchNode.kind, file: matchNode.file, @@ -135,24 +153,24 @@ export function flowData(name, dbPath, opts = {}) { }; // Forward BFS through callees - const visited = new Set([matchNode.id]); + const visited = new Set([matchNode.id]); let frontier = [matchNode.id]; - const steps = []; - const cycles = []; + const steps: Array<{ depth: number; nodes: NodeInfo[] }> = []; + const cycles: Array<{ from: string; to: string; depth: number }> = []; let truncated = false; // Track which nodes are at each depth and their depth for leaf detection - const nodeDepths = new Map(); - const idToNode = new Map(); + const nodeDepths = new Map(); + const idToNode = new Map(); idToNode.set(matchNode.id, entry); for (let d = 1; d <= maxDepth; d++) { - const nextFrontier = []; - const levelNodes = []; + const nextFrontier: number[] = []; + const levelNodes: NodeInfo[] = []; for (const fid of frontier) { const callees = db - .prepare( + .prepare( `SELECT DISTINCT n.id, n.name, n.kind, n.file, n.line, n.role FROM edges e JOIN nodes n ON e.target_id = n.id WHERE e.source_id = ? 
AND e.kind = 'calls'`, @@ -173,7 +191,7 @@ export function flowData(name, dbPath, opts = {}) { visited.add(c.id); nextFrontier.push(c.id); - const nodeInfo = { name: c.name, kind: c.kind, file: c.file, line: c.line }; + const nodeInfo: NodeInfo = { name: c.name, kind: c.kind, file: c.file, line: c.line }; levelNodes.push(nodeInfo); nodeDepths.set(c.id, d); idToNode.set(c.id, nodeInfo); @@ -194,10 +212,10 @@ export function flowData(name, dbPath, opts = {}) { // Identify leaves: visited nodes that have no outgoing 'calls' edges to other visited nodes // (or no outgoing calls at all) - const leaves = []; + const leaves: Array = []; for (const [id, depth] of nodeDepths) { const outgoing = db - .prepare( + .prepare<{ id: number }>( `SELECT DISTINCT n.id FROM edges e JOIN nodes n ON e.target_id = n.id WHERE e.source_id = ? AND e.kind = 'calls'`, diff --git a/src/features/graph-enrichment.js b/src/features/graph-enrichment.ts similarity index 68% rename from src/features/graph-enrichment.js rename to src/features/graph-enrichment.ts index 4d399409..819f4f18 100644 --- a/src/features/graph-enrichment.js +++ b/src/features/graph-enrichment.ts @@ -8,6 +8,7 @@ import { DEFAULT_ROLE_COLORS, } from '../presentation/colors.js'; import { DEFAULT_CONFIG, renderPlotHTML } from '../presentation/viewer.js'; +import type { BetterSqlite3Database } from '../types.js'; // Re-export presentation utilities for backward compatibility export { loadPlotConfig } from '../presentation/viewer.js'; @@ -16,23 +17,95 @@ const DEFAULT_MIN_CONFIDENCE = 0.5; // ─── Data Preparation ───────────────────────────────────────────────── -/** - * Prepare enriched graph data for the HTML viewer. 
- */ -export function prepareGraphData(db, opts = {}) { +interface PlotConfig { + filter: { + kinds?: string[] | null; + files?: string[] | null; + roles?: string[] | null; + }; + colorBy?: string; + nodeColors: Record; + roleColors: Record; + riskThresholds?: { highBlastRadius?: number; lowMI?: number }; + seedStrategy?: string; + seedCount?: number; + title?: string; +} + +interface VisNode { + id: number | string; + label: string; + title: string; + color: string; + kind: string; + role: string; + file: string; + line: number; + community: number | null; + cognitive: number | null; + cyclomatic: number | null; + maintainabilityIndex: number | null; + fanIn: number; + fanOut: number; + directory: string; + risk: string[]; +} + +interface VisEdge { + id: string; + from: number | string; + to: number | string; +} + +interface GraphData { + nodes: VisNode[]; + edges: VisEdge[]; + seedNodeIds: (number | string)[]; +} + +export function prepareGraphData( + db: BetterSqlite3Database, + opts: { + fileLevel?: boolean; + noTests?: boolean; + minConfidence?: number; + config?: PlotConfig; + } = {}, +): GraphData { const fileLevel = opts.fileLevel !== false; const noTests = opts.noTests || false; const minConf = opts.minConfidence ?? DEFAULT_MIN_CONFIDENCE; - const cfg = opts.config || DEFAULT_CONFIG; + const cfg = opts.config || (DEFAULT_CONFIG as unknown as PlotConfig); return fileLevel ? 
prepareFileLevelData(db, noTests, minConf, cfg) : prepareFunctionLevelData(db, noTests, minConf, cfg); } -function prepareFunctionLevelData(db, noTests, minConf, cfg) { +interface FunctionEdgeRow { + source_id: number; + source_name: string; + source_kind: string; + source_file: string; + source_line: number; + source_role: string | null; + target_id: number; + target_name: string; + target_kind: string; + target_file: string; + target_line: number; + target_role: string | null; + edge_kind: string; +} + +function prepareFunctionLevelData( + db: BetterSqlite3Database, + noTests: boolean, + minConf: number, + cfg: PlotConfig, +): GraphData { let edges = db - .prepare( + .prepare( ` SELECT n1.id AS source_id, n1.name AS source_name, n1.kind AS source_kind, n1.file AS source_file, n1.line AS source_line, n1.role AS source_role, @@ -65,7 +138,10 @@ function prepareFunctionLevelData(db, noTests, minConf, cfg) { ); } - const nodeMap = new Map(); + const nodeMap = new Map< + number, + { id: number; name: string; kind: string; file: string; line: number; role: string | null } + >(); for (const e of edges) { if (!nodeMap.has(e.source_id)) { nodeMap.set(e.source_id, { @@ -92,17 +168,26 @@ function prepareFunctionLevelData(db, noTests, minConf, cfg) { if (cfg.filter.roles) { const roles = new Set(cfg.filter.roles); for (const [id, n] of nodeMap) { - if (!roles.has(n.role)) nodeMap.delete(id); + if (!roles.has(n.role!)) nodeMap.delete(id); } const nodeIds = new Set(nodeMap.keys()); edges = edges.filter((e) => nodeIds.has(e.source_id) && nodeIds.has(e.target_id)); } // Complexity data - const complexityMap = new Map(); + const complexityMap = new Map< + number, + { cognitive: number; cyclomatic: number; maintainabilityIndex: number } + >(); try { const rows = db - .prepare( + .prepare<{ + node_id: number; + cognitive: number; + cyclomatic: number; + max_nesting: number; + maintainability_index: number; + }>( 'SELECT node_id, cognitive, cyclomatic, max_nesting, 
maintainability_index FROM function_complexity', ) .all(); @@ -127,24 +212,24 @@ function prepareFunctionLevelData(db, noTests, minConf, cfg) { } // Use DB-level fan-in/fan-out (counts ALL call edges, not just visible) - const fanInMap = new Map(); - const fanOutMap = new Map(); + const fanInMap = new Map(); + const fanOutMap = new Map(); const fanInRows = db - .prepare( + .prepare<{ node_id: number; fan_in: number }>( "SELECT target_id AS node_id, COUNT(*) AS fan_in FROM edges WHERE kind = 'calls' GROUP BY target_id", ) .all(); for (const r of fanInRows) fanInMap.set(r.node_id, r.fan_in); const fanOutRows = db - .prepare( + .prepare<{ node_id: number; fan_out: number }>( "SELECT source_id AS node_id, COUNT(*) AS fan_out FROM edges WHERE kind = 'calls' GROUP BY source_id", ) .all(); for (const r of fanOutRows) fanOutMap.set(r.node_id, r.fan_out); // Communities (Louvain) via graph subsystem - const communityMap = new Map(); + const communityMap = new Map(); if (nodeMap.size > 0) { try { const { assignments } = louvainCommunities(fnGraph); @@ -155,23 +240,27 @@ function prepareFunctionLevelData(db, noTests, minConf, cfg) { } // Build enriched nodes - const visNodes = [...nodeMap.values()].map((n) => { + const visNodes: VisNode[] = [...nodeMap.values()].map((n) => { const cx = complexityMap.get(n.id) || null; const fanIn = fanInMap.get(n.id) || 0; const fanOut = fanOutMap.get(n.id) || 0; const community = communityMap.get(n.id) ?? null; const directory = path.dirname(n.file); - const risk = []; + const risk: string[] = []; if (n.role?.startsWith('dead')) risk.push('dead-code'); if (fanIn >= (cfg.riskThresholds?.highBlastRadius ?? 10)) risk.push('high-blast-radius'); if (cx && cx.maintainabilityIndex < (cfg.riskThresholds?.lowMI ?? 40)) risk.push('low-mi'); - const color = + const color: string = cfg.colorBy === 'role' && n.role - ? cfg.roleColors[n.role] || DEFAULT_ROLE_COLORS[n.role] || '#ccc' + ? 
cfg.roleColors[n.role] || + (DEFAULT_ROLE_COLORS as Record)[n.role] || + '#ccc' : cfg.colorBy === 'community' && community !== null - ? COMMUNITY_COLORS[community % COMMUNITY_COLORS.length] - : cfg.nodeColors[n.kind] || DEFAULT_NODE_COLORS[n.kind] || '#ccc'; + ? COMMUNITY_COLORS[community % COMMUNITY_COLORS.length] || '#ccc' + : cfg.nodeColors[n.kind] || + (DEFAULT_NODE_COLORS as Record)[n.kind] || + '#ccc'; return { id: n.id, @@ -193,14 +282,14 @@ function prepareFunctionLevelData(db, noTests, minConf, cfg) { }; }); - const visEdges = edges.map((e, i) => ({ + const visEdges: VisEdge[] = edges.map((e, i) => ({ id: `e${i}`, from: e.source_id, to: e.target_id, })); // Seed strategy - let seedNodeIds; + let seedNodeIds: (number | string)[]; if (cfg.seedStrategy === 'top-fanin') { const sorted = [...visNodes].sort((a, b) => b.fanIn - a.fanIn); seedNodeIds = sorted.slice(0, cfg.seedCount || 30).map((n) => n.id); @@ -213,9 +302,19 @@ function prepareFunctionLevelData(db, noTests, minConf, cfg) { return { nodes: visNodes, edges: visEdges, seedNodeIds }; } -function prepareFileLevelData(db, noTests, minConf, cfg) { +interface FileLevelEdge { + source: string; + target: string; +} + +function prepareFileLevelData( + db: BetterSqlite3Database, + noTests: boolean, + minConf: number, + cfg: PlotConfig, +): GraphData { let edges = db - .prepare( + .prepare( ` SELECT DISTINCT n1.file AS source, n2.file AS target FROM edges e @@ -228,26 +327,26 @@ function prepareFileLevelData(db, noTests, minConf, cfg) { .all(minConf); if (noTests) edges = edges.filter((e) => !isTestFile(e.source) && !isTestFile(e.target)); - const files = new Set(); + const files = new Set(); for (const { source, target } of edges) { files.add(source); files.add(target); } - const fileIds = new Map(); + const fileIds = new Map(); let idx = 0; for (const f of files) fileIds.set(f, idx++); // Fan-in/fan-out - const fanInCount = new Map(); - const fanOutCount = new Map(); + const fanInCount = new Map(); + const 
fanOutCount = new Map(); for (const { source, target } of edges) { fanOutCount.set(source, (fanOutCount.get(source) || 0) + 1); fanInCount.set(target, (fanInCount.get(target) || 0) + 1); } // Communities via graph subsystem - const communityMap = new Map(); + const communityMap = new Map(); if (files.size > 0) { try { const fileGraph = new CodeGraph(); @@ -263,16 +362,18 @@ function prepareFileLevelData(db, noTests, minConf, cfg) { } } - const visNodes = [...files].map((f) => { - const id = fileIds.get(f); + const visNodes: VisNode[] = [...files].map((f) => { + const id = fileIds.get(f)!; const community = communityMap.get(f) ?? null; const fanIn = fanInCount.get(f) || 0; const fanOut = fanOutCount.get(f) || 0; const directory = path.dirname(f); - const color = + const color: string = cfg.colorBy === 'community' && community !== null - ? COMMUNITY_COLORS[community % COMMUNITY_COLORS.length] - : cfg.nodeColors.file || DEFAULT_NODE_COLORS.file; + ? COMMUNITY_COLORS[community % COMMUNITY_COLORS.length] || '#ccc' + : cfg.nodeColors['file'] || + (DEFAULT_NODE_COLORS as Record)['file'] || + '#ccc'; return { id, @@ -294,13 +395,13 @@ function prepareFileLevelData(db, noTests, minConf, cfg) { }; }); - const visEdges = edges.map(({ source, target }, i) => ({ + const visEdges: VisEdge[] = edges.map(({ source, target }, i) => ({ id: `e${i}`, - from: fileIds.get(source), - to: fileIds.get(target), + from: fileIds.get(source)!, + to: fileIds.get(target)!, })); - let seedNodeIds; + let seedNodeIds: (number | string)[]; if (cfg.seedStrategy === 'top-fanin') { const sorted = [...visNodes].sort((a, b) => b.fanIn - a.fanIn); seedNodeIds = sorted.slice(0, cfg.seedCount || 30).map((n) => n.id); @@ -315,13 +416,16 @@ function prepareFileLevelData(db, noTests, minConf, cfg) { // ─── HTML Generation (thin wrapper) ────────────────────────────────── -/** - * Generate a self-contained interactive HTML file with vis-network. 
- * - * Loads graph data from the DB, then delegates to the presentation layer. - */ -export function generatePlotHTML(db, opts = {}) { - const cfg = opts.config || DEFAULT_CONFIG; +export function generatePlotHTML( + db: BetterSqlite3Database, + opts: { + fileLevel?: boolean; + noTests?: boolean; + minConfidence?: number; + config?: PlotConfig; + } = {}, +): string { + const cfg = opts.config || (DEFAULT_CONFIG as unknown as PlotConfig); const data = prepareGraphData(db, opts); return renderPlotHTML(data, cfg); } diff --git a/src/features/manifesto.js b/src/features/manifesto.ts similarity index 68% rename from src/features/manifesto.js rename to src/features/manifesto.ts index d37fd296..71780f98 100644 --- a/src/features/manifesto.js +++ b/src/features/manifesto.ts @@ -4,17 +4,20 @@ import { findCycles } from '../domain/graph/cycles.js'; import { loadConfig } from '../infrastructure/config.js'; import { debug } from '../infrastructure/logger.js'; import { paginateResult } from '../shared/paginate.js'; +import type { BetterSqlite3Database, CodegraphConfig, ThresholdRule } from '../types.js'; import { evaluateBoundaries } from './boundaries.js'; // ─── Rule Definitions ───────────────────────────────────────────────── -/** - * All supported manifesto rules. - * level: 'function' | 'file' | 'graph' - * metric: DB column or special key - * defaults: { warn, fail } — null means disabled - */ -export const RULE_DEFS = [ +interface RuleDef { + name: string; + level: 'function' | 'file' | 'graph'; + metric: string; + defaults: { warn: number | null; fail: number | null }; + reportOnly?: boolean; +} + +export const RULE_DEFS: RuleDef[] = [ { name: 'cognitive', level: 'function', @@ -74,12 +77,12 @@ const NO_TEST_SQL = ` AND n.file NOT LIKE '%__tests__%' AND n.file NOT LIKE '%.stories.%'`; -/** - * Deep-merge user config with RULE_DEFS defaults per rule. - * mergeConfig in config.js is shallow for nested objects, so we do per-rule merging here. 
- */ -function resolveRules(userRules) { - const resolved = {}; +interface ResolvedRules { + [name: string]: ThresholdRule; +} + +function resolveRules(userRules?: Record>): ResolvedRules { + const resolved: ResolvedRules = {}; for (const def of RULE_DEFS) { const user = userRules?.[def.name]; resolved[def.name] = { @@ -90,17 +93,27 @@ function resolveRules(userRules) { return resolved; } -/** - * Check if a rule is enabled (has at least one non-null threshold). - */ -function isEnabled(thresholds) { +function isEnabled(thresholds: ThresholdRule): boolean { return thresholds.warn != null || thresholds.fail != null; } -/** - * Check a numeric value against warn/fail thresholds, push violations. - */ -function checkThreshold(rule, thresholds, value, meta, violations) { +interface Violation { + rule: string; + level: string; + value: number; + threshold: number; + name?: string; + file?: string; + line?: number | null; +} + +function checkThreshold( + rule: string, + thresholds: ThresholdRule, + value: number, + meta: { name?: string; file?: string; line?: number | null }, + violations: Violation[], +): 'fail' | 'warn' | 'pass' { if (thresholds.fail != null && value >= thresholds.fail) { violations.push({ rule, @@ -126,16 +139,39 @@ function checkThreshold(rule, thresholds, value, meta, violations) { // ─── Evaluators ─────────────────────────────────────────────────────── -function evaluateFunctionRules(db, rules, opts, violations, ruleResults) { +interface RuleResult { + name: string; + level: string; + status: string; + thresholds: ThresholdRule; + violationCount: number; +} + +interface ManifestoOpts { + noTests?: boolean; + file?: string; + kind?: string; + config?: CodegraphConfig; + limit?: number; + offset?: number; +} + +function evaluateFunctionRules( + db: BetterSqlite3Database, + rules: ResolvedRules, + opts: ManifestoOpts, + violations: Violation[], + ruleResults: RuleResult[], +): void { const functionDefs = RULE_DEFS.filter((d) => d.level === 
'function'); - const activeDefs = functionDefs.filter((d) => isEnabled(rules[d.name])); + const activeDefs = functionDefs.filter((d) => isEnabled(rules[d.name]!)); if (activeDefs.length === 0) { for (const def of functionDefs) { ruleResults.push({ name: def.name, level: def.level, status: 'pass', - thresholds: rules[def.name], + thresholds: rules[def.name]!, violationCount: 0, }); } @@ -143,10 +179,10 @@ function evaluateFunctionRules(db, rules, opts, violations, ruleResults) { } let where = "WHERE n.kind IN ('function','method')"; - const params = []; + const params: unknown[] = []; if (opts.noTests) where += NO_TEST_SQL; { - const fc = buildFileConditionSQL(opts.file, 'n.file'); + const fc = buildFileConditionSQL(opts.file as string, 'n.file'); where += fc.sql; params.push(...fc.params); } @@ -155,7 +191,7 @@ function evaluateFunctionRules(db, rules, opts, violations, ruleResults) { params.push(opts.kind); } - let rows; + let rows: Array>; try { rows = db .prepare( @@ -165,15 +201,15 @@ function evaluateFunctionRules(db, rules, opts, violations, ruleResults) { JOIN nodes n ON fc.node_id = n.id ${where}`, ) - .all(...params); - } catch (err) { - debug('manifesto function query failed: %s', err.message); + .all(...params) as Array>; + } catch (err: unknown) { + debug(`manifesto function query failed: ${(err as Error).message}`); rows = []; } // Track worst status per rule - const worst = {}; - const counts = {}; + const worst: Record = {}; + const counts: Record = {}; for (const def of functionDefs) { worst[def.name] = 'pass'; counts[def.name] = 0; @@ -181,12 +217,16 @@ function evaluateFunctionRules(db, rules, opts, violations, ruleResults) { for (const row of rows) { for (const def of activeDefs) { - const value = row[def.metric]; + const value = row[def.metric] as number | null; if (value == null) continue; - const meta = { name: row.name, file: row.file, line: row.line }; - const status = checkThreshold(def.name, rules[def.name], value, meta, violations); + 
const meta = { + name: row['name'] as string, + file: row['file'] as string, + line: row['line'] as number, + }; + const status = checkThreshold(def.name, rules[def.name]!, value, meta, violations); if (status !== 'pass') { - counts[def.name]++; + counts[def.name] = (counts[def.name] ?? 0) + 1; if (status === 'fail') worst[def.name] = 'fail'; else if (worst[def.name] !== 'fail') worst[def.name] = 'warn'; } @@ -197,23 +237,29 @@ function evaluateFunctionRules(db, rules, opts, violations, ruleResults) { ruleResults.push({ name: def.name, level: def.level, - status: worst[def.name], - thresholds: rules[def.name], - violationCount: counts[def.name], + status: worst[def.name]!, + thresholds: rules[def.name]!, + violationCount: counts[def.name] ?? 0, }); } } -function evaluateFileRules(db, rules, opts, violations, ruleResults) { +function evaluateFileRules( + db: BetterSqlite3Database, + rules: ResolvedRules, + opts: ManifestoOpts, + violations: Violation[], + ruleResults: RuleResult[], +): void { const fileDefs = RULE_DEFS.filter((d) => d.level === 'file'); - const activeDefs = fileDefs.filter((d) => isEnabled(rules[d.name])); + const activeDefs = fileDefs.filter((d) => isEnabled(rules[d.name]!)); if (activeDefs.length === 0) { for (const def of fileDefs) { ruleResults.push({ name: def.name, level: def.level, status: 'pass', - thresholds: rules[def.name], + thresholds: rules[def.name]!, violationCount: 0, }); } @@ -221,15 +267,15 @@ function evaluateFileRules(db, rules, opts, violations, ruleResults) { } let where = "WHERE n.kind = 'file'"; - const params = []; + const params: unknown[] = []; if (opts.noTests) where += NO_TEST_SQL; { - const fc = buildFileConditionSQL(opts.file, 'n.file'); + const fc = buildFileConditionSQL(opts.file as string, 'n.file'); where += fc.sql; params.push(...fc.params); } - let rows; + let rows: Array>; try { rows = db .prepare( @@ -240,14 +286,14 @@ function evaluateFileRules(db, rules, opts, violations, ruleResults) { JOIN nodes n ON 
nm.node_id = n.id ${where}`, ) - .all(...params); - } catch (err) { - debug('manifesto file query failed: %s', err.message); + .all(...params) as Array>; + } catch (err: unknown) { + debug(`manifesto file query failed: ${(err as Error).message}`); rows = []; } - const worst = {}; - const counts = {}; + const worst: Record = {}; + const counts: Record = {}; for (const def of fileDefs) { worst[def.name] = 'pass'; counts[def.name] = 0; @@ -255,12 +301,16 @@ function evaluateFileRules(db, rules, opts, violations, ruleResults) { for (const row of rows) { for (const def of activeDefs) { - const value = row[def.metric]; + const value = row[def.metric] as number | null; if (value == null) continue; - const meta = { name: row.name, file: row.file, line: row.line }; - const status = checkThreshold(def.name, rules[def.name], value, meta, violations); + const meta = { + name: row['name'] as string, + file: row['file'] as string, + line: row['line'] as number, + }; + const status = checkThreshold(def.name, rules[def.name]!, value, meta, violations); if (status !== 'pass') { - counts[def.name]++; + counts[def.name] = (counts[def.name] ?? 0) + 1; if (status === 'fail') worst[def.name] = 'fail'; else if (worst[def.name] !== 'fail') worst[def.name] = 'warn'; } @@ -271,15 +321,21 @@ function evaluateFileRules(db, rules, opts, violations, ruleResults) { ruleResults.push({ name: def.name, level: def.level, - status: worst[def.name], - thresholds: rules[def.name], - violationCount: counts[def.name], + status: worst[def.name]!, + thresholds: rules[def.name]!, + violationCount: counts[def.name] ?? 
0, }); } } -function evaluateGraphRules(db, rules, opts, violations, ruleResults) { - const thresholds = rules.noCycles; +function evaluateGraphRules( + db: BetterSqlite3Database, + rules: ResolvedRules, + opts: ManifestoOpts, + violations: Violation[], + ruleResults: RuleResult[], +): void { + const thresholds = rules['noCycles']!; if (!isEnabled(thresholds)) { ruleResults.push({ name: 'noCycles', @@ -329,14 +385,24 @@ function evaluateGraphRules(db, rules, opts, violations, ruleResults) { }); } -function evaluateBoundaryRules(db, rules, config, opts, violations, ruleResults) { - const thresholds = rules.boundaries; +function evaluateBoundaryRules( + db: BetterSqlite3Database, + rules: ResolvedRules, + config: CodegraphConfig, + opts: ManifestoOpts, + violations: Violation[], + ruleResults: RuleResult[], +): void { + const thresholds = rules['boundaries']!; const boundaryConfig = config.manifesto?.boundaries; // Auto-enable at warn level when boundary config exists but threshold not set - const effectiveThresholds = { ...thresholds }; + const effectiveThresholds: ThresholdRule = { + warn: thresholds.warn ?? null, + fail: thresholds.fail ?? null, + }; if (boundaryConfig && !isEnabled(thresholds)) { - effectiveThresholds.warn = true; + effectiveThresholds.warn = true as unknown as number; } if (!isEnabled(effectiveThresholds) || !boundaryConfig) { @@ -384,25 +450,20 @@ function evaluateBoundaryRules(db, rules, config, opts, violations, ruleResults) // ─── Public API ─────────────────────────────────────────────────────── -/** - * Evaluate all manifesto rules and return structured results. 
- * - * @param {string} [customDbPath] - Path to graph.db - * @param {object} [opts] - Options - * @param {boolean} [opts.noTests] - Exclude test files - * @param {string} [opts.file] - Filter by file (partial match) - * @param {string} [opts.kind] - Filter by symbol kind - * @returns {{ rules: object[], violations: object[], summary: object, passed: boolean }} - */ -export function manifestoData(customDbPath, opts = {}) { +export function manifestoData( + customDbPath?: string, + opts: ManifestoOpts = {}, +): Record { const db = openReadonlyOrFail(customDbPath); try { const config = opts.config || loadConfig(process.cwd()); - const rules = resolveRules(config.manifesto?.rules); + const rules = resolveRules( + config.manifesto?.rules as unknown as Record>, + ); - const violations = []; - const ruleResults = []; + const violations: Violation[] = []; + const ruleResults: RuleResult[] = []; evaluateFunctionRules(db, rules, opts, violations, ruleResults); evaluateFileRules(db, rules, opts, violations, ruleResults); diff --git a/src/features/owners.js b/src/features/owners.ts similarity index 72% rename from src/features/owners.js rename to src/features/owners.ts index 3608d947..5c278ce6 100644 --- a/src/features/owners.js +++ b/src/features/owners.ts @@ -8,16 +8,21 @@ import { isTestFile } from '../infrastructure/test-filter.js'; const CODEOWNERS_PATHS = ['CODEOWNERS', '.github/CODEOWNERS', 'docs/CODEOWNERS']; -/** @type {Map} */ -const codeownersCache = new Map(); - -/** - * Find and parse a CODEOWNERS file from the standard locations. - * Results are cached per rootDir and invalidated when the file's mtime changes. 
- * @param {string} rootDir - Repository root directory - * @returns {{ rules: Array<{pattern: string, owners: string[], regex: RegExp}>, path: string } | null} - */ -export function parseCodeowners(rootDir) { +interface CodeownersRule { + pattern: string; + owners: string[]; + regex: RegExp; +} + +interface CodeownersCache { + rules: CodeownersRule[]; + path: string; + mtime: number; +} + +const codeownersCache = new Map(); + +export function parseCodeowners(rootDir: string): { rules: CodeownersRule[]; path: string } | null { const cached = codeownersCache.get(rootDir); for (const rel of CODEOWNERS_PATHS) { @@ -37,24 +42,18 @@ export function parseCodeowners(rootDir) { return null; } -/** Clear the parseCodeowners cache (for testing). */ -export function clearCodeownersCache() { +export function clearCodeownersCache(): void { codeownersCache.clear(); } -/** - * Parse CODEOWNERS file content into rules. - * @param {string} content - Raw CODEOWNERS file content - * @returns {Array<{pattern: string, owners: string[], regex: RegExp}>} - */ -export function parseCodeownersContent(content) { - const rules = []; +export function parseCodeownersContent(content: string): CodeownersRule[] { + const rules: CodeownersRule[] = []; for (const raw of content.split('\n')) { const line = raw.trim(); if (!line || line.startsWith('#')) continue; const parts = line.split(/\s+/); if (parts.length < 2) continue; - const pattern = parts[0]; + const pattern = parts[0]!; const owners = parts.slice(1).filter((p) => p.startsWith('@') || /^[^@\s]+@[^@\s]+$/.test(p)); if (owners.length === 0) continue; rules.push({ pattern, owners, regex: patternToRegex(pattern) }); @@ -62,17 +61,7 @@ export function parseCodeownersContent(content) { return rules; } -/** - * Convert a CODEOWNERS glob pattern to a RegExp. 
- * - * CODEOWNERS semantics: - * - Leading `/` anchors to repo root; without it, matches anywhere - * - `*` matches anything except `/` - * - `**` matches everything including `/` - * - Trailing `/` matches directory contents - * - A bare filename like `Makefile` matches anywhere - */ -export function patternToRegex(pattern) { +export function patternToRegex(pattern: string): RegExp { let p = pattern; const anchored = p.startsWith('/'); if (anchored) p = p.slice(1); @@ -112,14 +101,8 @@ export function patternToRegex(pattern) { return new RegExp(regex); } -/** - * Find the owners for a file path. CODEOWNERS uses last-match-wins semantics. - * @param {string} filePath - Relative file path (forward slashes) - * @param {Array<{pattern: string, owners: string[], regex: RegExp}>} rules - * @returns {string[]} - */ -export function matchOwners(filePath, rules) { - let owners = []; +export function matchOwners(filePath: string, rules: CodeownersRule[]): string[] { + let owners: string[] = []; for (const rule of rules) { if (rule.regex.test(filePath)) { owners = rule.owners; @@ -130,19 +113,15 @@ export function matchOwners(filePath, rules) { // ─── Data Functions ────────────────────────────────────────────────── -/** - * Lightweight helper for diff-impact integration. - * Returns owner mapping for a list of file paths. 
- * @param {string[]} filePaths - Relative file paths - * @param {string} repoRoot - Repository root directory - * @returns {{ owners: Map, affectedOwners: string[], suggestedReviewers: string[] }} - */ -export function ownersForFiles(filePaths, repoRoot) { +export function ownersForFiles( + filePaths: string[], + repoRoot: string, +): { owners: Map; affectedOwners: string[]; suggestedReviewers: string[] } { const parsed = parseCodeowners(repoRoot); if (!parsed) return { owners: new Map(), affectedOwners: [], suggestedReviewers: [] }; - const ownersMap = new Map(); - const ownerSet = new Set(); + const ownersMap = new Map(); + const ownerSet = new Set(); for (const file of filePaths) { const fileOwners = matchOwners(file, parsed.rules); ownersMap.set(file, fileOwners); @@ -152,17 +131,35 @@ export function ownersForFiles(filePaths, repoRoot) { return { owners: ownersMap, affectedOwners, suggestedReviewers: affectedOwners }; } -/** - * Full ownership data for the graph. - * @param {string} [customDbPath] - * @param {object} [opts] - * @param {string} [opts.owner] - Filter to a specific owner - * @param {string} [opts.file] - Filter by partial file path - * @param {string} [opts.kind] - Filter by symbol kind - * @param {boolean} [opts.noTests] - Exclude test files - * @param {boolean} [opts.boundary] - Show cross-owner boundary edges - */ -export function ownersData(customDbPath, opts = {}) { +interface OwnersDataOpts { + owner?: string; + file?: string; + kind?: string; + noTests?: boolean; + boundary?: boolean; +} + +export function ownersData( + customDbPath?: string, + opts: OwnersDataOpts = {}, +): { + codeownersFile: string | null; + files: { file: string; owners: string[] }[]; + symbols: { name: string; kind: string; file: string; line: number; owners: string[] }[]; + boundaries: { + from: { name: string; kind: string; file: string; line: number; owners: string[] }; + to: { name: string; kind: string; file: string; line: number; owners: string[] }; + edgeKind: 
string; + }[]; + summary: { + totalFiles: number; + ownedFiles: number; + unownedFiles: number; + coveragePercent: number; + ownerCount: number; + byOwner: { owner: string; fileCount: number }[]; + }; +} { const db = openReadonlyOrFail(customDbPath); try { const dbPath = findDbPath(customDbPath); @@ -187,10 +184,9 @@ export function ownersData(customDbPath, opts = {}) { } // Get all distinct files from nodes - let allFiles = db - .prepare('SELECT DISTINCT file FROM nodes') - .all() - .map((r) => r.file); + let allFiles = (db.prepare('SELECT DISTINCT file FROM nodes').all() as { file: string }[]).map( + (r) => r.file, + ); if (opts.noTests) allFiles = allFiles.filter((f) => !isTestFile(f)); const fileFilters = normalizeFileFilter(opts.file); @@ -205,28 +201,32 @@ export function ownersData(customDbPath, opts = {}) { })); // Build owner-to-files index - const ownerIndex = new Map(); + const ownerIndex = new Map(); let ownedCount = 0; for (const fo of fileOwners) { if (fo.owners.length > 0) ownedCount++; for (const o of fo.owners) { if (!ownerIndex.has(o)) ownerIndex.set(o, []); - ownerIndex.get(o).push(fo.file); + ownerIndex.get(o)!.push(fo.file); } } // Filter files if --owner specified let filteredFiles = fileOwners; if (opts.owner) { - filteredFiles = fileOwners.filter((fo) => fo.owners.includes(opts.owner)); + filteredFiles = fileOwners.filter((fo) => fo.owners.includes(opts.owner!)); } // Get symbols for filtered files const fileSet = new Set(filteredFiles.map((fo) => fo.file)); - let symbols = db - .prepare('SELECT name, kind, file, line FROM nodes') - .all() - .filter((n) => fileSet.has(n.file)); + let symbols = ( + db.prepare('SELECT name, kind, file, line FROM nodes').all() as { + name: string; + kind: string; + file: string; + line: number; + }[] + ).filter((n) => fileSet.has(n.file)); if (opts.noTests) symbols = symbols.filter((s) => !isTestFile(s.file)); if (opts.kind) symbols = symbols.filter((s) => s.kind === opts.kind); @@ -237,7 +237,11 @@ export 
function ownersData(customDbPath, opts = {}) { })); // Boundary analysis — cross-owner call edges - const boundaries = []; + const boundaries: { + from: { name: string; kind: string; file: string; line: number; owners: string[] }; + to: { name: string; kind: string; file: string; line: number; owners: string[] }; + edgeKind: string; + }[] = []; if (opts.boundary) { const edges = db .prepare( @@ -249,7 +253,18 @@ export function ownersData(customDbPath, opts = {}) { JOIN nodes t ON e.target_id = t.id WHERE e.kind = 'calls'`, ) - .all(); + .all() as { + id: number; + edgeKind: string; + srcName: string; + srcKind: string; + srcFile: string; + srcLine: number; + tgtName: string; + tgtKind: string; + tgtFile: string; + tgtLine: number; + }[]; for (const e of edges) { if (opts.noTests && (isTestFile(e.srcFile) || isTestFile(e.tgtFile))) continue; diff --git a/src/features/sequence.js b/src/features/sequence.ts similarity index 64% rename from src/features/sequence.js rename to src/features/sequence.ts index 8fd5a74f..94202c59 100644 --- a/src/features/sequence.js +++ b/src/features/sequence.ts @@ -1,49 +1,34 @@ -/** - * Sequence diagram generation – Mermaid sequenceDiagram from call graph edges. - * - * Participants are files (not individual functions). Calls within the same file - * become self-messages. This keeps diagrams readable and matches typical - * sequence-diagram conventions. 
- */ - -import { openRepo } from '../db/index.js'; +import { openRepo, type Repository } from '../db/index.js'; import { SqliteRepository } from '../db/repository/sqlite-repository.js'; import { findMatchingNodes } from '../domain/queries.js'; import { loadConfig } from '../infrastructure/config.js'; import { isTestFile } from '../infrastructure/test-filter.js'; import { paginateResult } from '../shared/paginate.js'; +import type { CodegraphConfig, NodeRowWithFanIn } from '../types.js'; import { FRAMEWORK_ENTRY_PREFIXES } from './structure.js'; // ─── Alias generation ──────────────────────────────────────────────── -/** - * Build short participant aliases from file paths with collision handling. - * e.g. "src/builder.js" → "builder", but if two files share basename, - * progressively add parent dirs: "src/builder" vs "lib/builder". - */ -function buildAliases(files) { - const aliases = new Map(); - const basenames = new Map(); +function buildAliases(files: string[]): Map { + const aliases = new Map(); + const basenames = new Map(); // Group by basename for (const file of files) { - const base = file - .split('/') - .pop() - .replace(/\.[^.]+$/, ''); + const base = (file.split('/').pop() ?? 
file).replace(/\.[^.]+$/, ''); if (!basenames.has(base)) basenames.set(base, []); - basenames.get(base).push(file); + basenames.get(base)!.push(file); } for (const [base, paths] of basenames) { if (paths.length === 1) { - aliases.set(paths[0], base); + aliases.set(paths[0]!, base); } else { // Collision — progressively add parent dirs until aliases are unique for (let depth = 2; depth <= 10; depth++) { - const trial = new Map(); + const trial = new Map(); let allUnique = true; - const seen = new Set(); + const seen = new Set(); for (const p of paths) { const parts = p.replace(/\.[^.]+$/, '').split('/'); @@ -71,8 +56,16 @@ function buildAliases(files) { // ─── Helpers ───────────────────────────────────────────────────────── -function findEntryNode(repo, name, opts) { - let matchNode = findMatchingNodes(repo, name, opts)[0] ?? null; +interface MatchNode extends NodeRowWithFanIn { + _relevance: number; +} + +function findEntryNode( + repo: Repository, + name: string, + opts: { noTests?: boolean; file?: string; kind?: string }, +): MatchNode | null { + let matchNode: MatchNode | null = findMatchingNodes(repo, name, opts)[0] ?? null; if (!matchNode) { for (const prefix of FRAMEWORK_ENTRY_PREFIXES) { matchNode = findMatchingNodes(repo, `${prefix}${name}`, opts)[0] ?? 
null; @@ -82,21 +75,44 @@ function findEntryNode(repo, name, opts) { return matchNode; } -function bfsCallees(repo, matchNode, maxDepth, noTests) { - const visited = new Set([matchNode.id]); +interface SequenceMessage { + from: string; + to: string; + label: string; + type: 'call' | 'return'; + depth: number; +} + +interface BfsResult { + messages: SequenceMessage[]; + fileSet: Set; + idToNode: Map; + truncated: boolean; +} + +function bfsCallees( + repo: Repository, + matchNode: MatchNode, + maxDepth: number, + noTests: boolean, +): BfsResult { + const visited = new Set([matchNode.id]); let frontier = [matchNode.id]; - const messages = []; - const fileSet = new Set([matchNode.file]); - const idToNode = new Map(); + const messages: SequenceMessage[] = []; + const fileSet = new Set([matchNode.file]); + const idToNode = new Map< + number, + { id: number; name: string; file: string; kind: string; line: number } + >(); idToNode.set(matchNode.id, matchNode); let truncated = false; for (let d = 1; d <= maxDepth; d++) { - const nextFrontier = []; + const nextFrontier: number[] = []; for (const fid of frontier) { const callees = repo.findCallees(fid); - const caller = idToNode.get(fid); + const caller = idToNode.get(fid)!; for (const c of callees) { if (noTests && isTestFile(c.file)) continue; @@ -130,13 +146,17 @@ function bfsCallees(repo, matchNode, maxDepth, noTests) { return { messages, fileSet, idToNode, truncated }; } -function annotateDataflow(repo, messages, idToNode) { +function annotateDataflow( + repo: Repository, + messages: SequenceMessage[], + idToNode: Map, +): void { const hasTable = repo.hasDataflowTable(); if (!hasTable || !(repo instanceof SqliteRepository)) return; const db = repo.db; - const nodeByNameFile = new Map(); + const nodeByNameFile = new Map(); for (const n of idToNode.values()) { nodeByNameFile.set(`${n.name}|${n.file}`, n); } @@ -151,7 +171,7 @@ function annotateDataflow(repo, messages, idToNode) { ORDER BY d.param_index`, ); - const 
seenReturns = new Set(); + const seenReturns = new Set(); for (const msg of [...messages]) { if (msg.type !== 'call') continue; const targetNode = nodeByNameFile.get(`${msg.label}|${msg.to}`); @@ -160,11 +180,11 @@ function annotateDataflow(repo, messages, idToNode) { const returnKey = `${msg.to}->${msg.from}:${msg.label}`; if (seenReturns.has(returnKey)) continue; - const returns = getReturns.all(targetNode.id); + const returns = getReturns.all(targetNode.id) as { expression: string }[]; if (returns.length > 0) { seenReturns.add(returnKey); - const expr = returns[0].expression || 'result'; + const expr = returns[0]!.expression || 'result'; messages.push({ from: msg.to, to: msg.from, @@ -180,7 +200,7 @@ function annotateDataflow(repo, messages, idToNode) { const targetNode = nodeByNameFile.get(`${msg.label}|${msg.to}`); if (!targetNode) continue; - const params = getFlowsTo.all(targetNode.id); + const params = getFlowsTo.all(targetNode.id) as { expression: string }[]; if (params.length > 0) { const paramNames = params @@ -194,11 +214,20 @@ function annotateDataflow(repo, messages, idToNode) { } } -function buildParticipants(fileSet, entryFile) { +interface Participant { + id: string; + label: string; + file: string; +} + +function buildParticipants( + fileSet: Set, + entryFile: string, +): { participants: Participant[]; aliases: Map } { const aliases = buildAliases([...fileSet]); - const participants = [...fileSet].map((file) => ({ - id: aliases.get(file), - label: file.split('/').pop(), + const participants: Participant[] = [...fileSet].map((file) => ({ + id: aliases.get(file)!, + label: file.split('/').pop() ?? file, file, })); @@ -213,26 +242,46 @@ function buildParticipants(fileSet, entryFile) { // ─── Core data function ────────────────────────────────────────────── -/** - * Build sequence diagram data by BFS-forward from an entry point. 
- * - * @param {string} name - Symbol name to trace from - * @param {string} [dbPath] - * @param {object} [opts] - * @param {number} [opts.depth=10] - * @param {boolean} [opts.noTests] - * @param {string} [opts.file] - * @param {string} [opts.kind] - * @param {boolean} [opts.dataflow] - * @param {number} [opts.limit] - * @param {number} [opts.offset] - * @returns {{ entry, participants, messages, depth, totalMessages, truncated }} - */ -export function sequenceData(name, dbPath, opts = {}) { +interface SequenceDataOpts { + depth?: number; + noTests?: boolean; + file?: string; + kind?: string; + dataflow?: boolean; + limit?: number; + offset?: number; + config?: CodegraphConfig; + repo?: Repository; +} + +interface SequenceEntry { + name: string; + file: string; + kind: string; + line: number; +} + +interface SequenceDataResult { + entry: SequenceEntry | null; + participants: Participant[]; + messages: SequenceMessage[]; + depth: number; + totalMessages: number; + truncated: boolean; +} + +export function sequenceData( + name: string, + dbPath?: string, + opts: SequenceDataOpts = {}, +): SequenceDataResult { const { repo, close } = openRepo(dbPath, opts); try { const config = opts.config || loadConfig(); - const maxDepth = opts.depth || config.analysis?.sequenceDepth || 10; + const maxDepth = + opts.depth || + (config as unknown as { analysis?: { sequenceDepth?: number } }).analysis?.sequenceDepth || + 10; const noTests = opts.noTests || false; const matchNode = findEntryNode(repo, name, opts); @@ -247,7 +296,7 @@ export function sequenceData(name, dbPath, opts = {}) { }; } - const entry = { + const entry: SequenceEntry = { name: matchNode.name, file: matchNode.file, kind: matchNode.kind, @@ -275,8 +324,8 @@ export function sequenceData(name, dbPath, opts = {}) { const { participants, aliases } = buildParticipants(fileSet, entry.file); for (const msg of messages) { - msg.from = aliases.get(msg.from); - msg.to = aliases.get(msg.to); + msg.from = 
aliases.get(msg.from)!; + msg.to = aliases.get(msg.to)!; } const base = { @@ -287,7 +336,10 @@ export function sequenceData(name, dbPath, opts = {}) { totalMessages: messages.length, truncated, }; - const result = paginateResult(base, 'messages', { limit: opts.limit, offset: opts.offset }); + const result = paginateResult(base, 'messages', { + limit: opts.limit, + offset: opts.offset, + }) as SequenceDataResult; if (opts.limit !== undefined || opts.offset !== undefined) { const activeFiles = new Set(result.messages.flatMap((m) => [m.from, m.to])); result.participants = result.participants.filter((p) => activeFiles.has(p.id)); diff --git a/src/features/shared/find-nodes.js b/src/features/shared/find-nodes.ts similarity index 51% rename from src/features/shared/find-nodes.js rename to src/features/shared/find-nodes.ts index e71b1eac..8a6fb46f 100644 --- a/src/features/shared/find-nodes.js +++ b/src/features/shared/find-nodes.ts @@ -1,22 +1,19 @@ import { buildFileConditionSQL } from '../../db/query-builder.js'; import { isTestFile } from '../../infrastructure/test-filter.js'; +import type { BetterSqlite3Database, NodeRow } from '../../types.js'; -/** - * Look up node(s) by name with optional file/kind/noTests filtering. - * - * @param {object} db - open SQLite database handle - * @param {string} name - symbol name (partial LIKE match) - * @param {object} [opts] - { kind, file, noTests } - * @param {string[]} defaultKinds - fallback kinds when opts.kind is not set - * @returns {object[]} matching node rows - */ -export function findNodes(db, name, opts = {}, defaultKinds = []) { +export function findNodes( + db: BetterSqlite3Database, + name: string, + opts: { kind?: string; file?: string | string[]; noTests?: boolean } = {}, + defaultKinds: string[] = [], +): NodeRow[] { const kinds = opts.kind ? 
[opts.kind] : defaultKinds; if (kinds.length === 0) throw new Error('findNodes: no kinds specified'); const placeholders = kinds.map(() => '?').join(', '); - const params = [`%${name}%`, ...kinds]; + const params: unknown[] = [`%${name}%`, ...kinds]; - const fc = buildFileConditionSQL(opts.file, 'file'); + const fc = buildFileConditionSQL(opts.file ?? [], 'file'); params.push(...fc.params); const rows = db @@ -25,7 +22,7 @@ export function findNodes(db, name, opts = {}, defaultKinds = []) { WHERE name LIKE ? AND kind IN (${placeholders})${fc.sql} ORDER BY file, line`, ) - .all(...params); + .all(...params) as NodeRow[]; return opts.noTests ? rows.filter((n) => !isTestFile(n.file)) : rows; } diff --git a/src/features/snapshot.js b/src/features/snapshot.ts similarity index 63% rename from src/features/snapshot.js rename to src/features/snapshot.ts index 71baf8d2..ea09c2f8 100644 --- a/src/features/snapshot.js +++ b/src/features/snapshot.ts @@ -7,11 +7,7 @@ import { ConfigError, DbError } from '../shared/errors.js'; const NAME_RE = /^[a-zA-Z0-9_-]+$/; -/** - * Validate a snapshot name (alphanumeric, hyphens, underscores only). - * Throws on invalid input. - */ -export function validateSnapshotName(name) { +export function validateSnapshotName(name: string): void { if (!name || !NAME_RE.test(name)) { throw new ConfigError( `Invalid snapshot name "${name}". Use only letters, digits, hyphens, and underscores.`, @@ -19,24 +15,19 @@ export function validateSnapshotName(name) { } } -/** - * Return the snapshots directory for a given DB path. - */ -export function snapshotsDir(dbPath) { +export function snapshotsDir(dbPath: string): string { return path.join(path.dirname(dbPath), 'snapshots'); } -/** - * Save a snapshot of the current graph database. - * Uses VACUUM INTO for an atomic, WAL-free copy. 
- * - * @param {string} name - Snapshot name - * @param {object} [options] - * @param {string} [options.dbPath] - Explicit path to graph.db - * @param {boolean} [options.force] - Overwrite existing snapshot - * @returns {{ name: string, path: string, size: number }} - */ -export function snapshotSave(name, options = {}) { +interface SnapshotSaveOptions { + dbPath?: string; + force?: boolean; +} + +export function snapshotSave( + name: string, + options: SnapshotSaveOptions = {}, +): { name: string; path: string; size: number } { validateSnapshotName(name); const dbPath = options.dbPath || findDbPath(); if (!fs.existsSync(dbPath)) { @@ -56,7 +47,8 @@ export function snapshotSave(name, options = {}) { fs.mkdirSync(dir, { recursive: true }); - const db = new Database(dbPath, { readonly: true }); + // biome-ignore lint/suspicious/noExplicitAny: better-sqlite3 default export typing + const db = new (Database as any)(dbPath, { readonly: true }); try { db.exec(`VACUUM INTO '${dest.replace(/'/g, "''")}'`); } finally { @@ -68,15 +60,11 @@ export function snapshotSave(name, options = {}) { return { name, path: dest, size: stat.size }; } -/** - * Restore a snapshot over the current graph database. - * Removes WAL/SHM sidecar files before overwriting. - * - * @param {string} name - Snapshot name - * @param {object} [options] - * @param {string} [options.dbPath] - Explicit path to graph.db - */ -export function snapshotRestore(name, options = {}) { +interface SnapshotDbPathOptions { + dbPath?: string; +} + +export function snapshotRestore(name: string, options: SnapshotDbPathOptions = {}): void { validateSnapshotName(name); const dbPath = options.dbPath || findDbPath(); const dir = snapshotsDir(dbPath); @@ -99,14 +87,14 @@ export function snapshotRestore(name, options = {}) { debug(`Restored snapshot "${name}" → ${dbPath}`); } -/** - * List all saved snapshots. 
- * - * @param {object} [options] - * @param {string} [options.dbPath] - Explicit path to graph.db - * @returns {Array<{ name: string, path: string, size: number, createdAt: Date }>} - */ -export function snapshotList(options = {}) { +interface SnapshotEntry { + name: string; + path: string; + size: number; + createdAt: Date; +} + +export function snapshotList(options: SnapshotDbPathOptions = {}): SnapshotEntry[] { const dbPath = options.dbPath || findDbPath(); const dir = snapshotsDir(dbPath); @@ -125,17 +113,10 @@ export function snapshotList(options = {}) { createdAt: stat.birthtime, }; }) - .sort((a, b) => b.createdAt - a.createdAt); + .sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime()); } -/** - * Delete a named snapshot. - * - * @param {string} name - Snapshot name - * @param {object} [options] - * @param {string} [options.dbPath] - Explicit path to graph.db - */ -export function snapshotDelete(name, options = {}) { +export function snapshotDelete(name: string, options: SnapshotDbPathOptions = {}): void { validateSnapshotName(name); const dbPath = options.dbPath || findDbPath(); const dir = snapshotsDir(dbPath); diff --git a/src/features/structure.js b/src/features/structure.ts similarity index 73% rename from src/features/structure.js rename to src/features/structure.ts index cfbf0e4c..099d03bb 100644 --- a/src/features/structure.js +++ b/src/features/structure.ts @@ -5,11 +5,23 @@ import { debug } from '../infrastructure/logger.js'; import { isTestFile } from '../infrastructure/test-filter.js'; import { normalizePath } from '../shared/constants.js'; import { paginateResult } from '../shared/paginate.js'; +import type { BetterSqlite3Database, CodegraphConfig } from '../types.js'; // ─── Build-time helpers ─────────────────────────────────────────────── -function getAncestorDirs(filePaths) { - const dirs = new Set(); +interface NodeIdStmt { + get(name: string, kind: string, file: string, line: number): { id: number } | undefined; +} + +interface 
FileSymbolData { + definitions: { name: string; kind: string; line: number }[]; + imports: unknown[]; + exports: unknown[]; + calls?: unknown[]; +} + +function getAncestorDirs(filePaths: string[]): Set { + const dirs = new Set(); for (const f of filePaths) { let d = normalizePath(path.dirname(f)); while (d && d !== '.') { @@ -20,9 +32,14 @@ function getAncestorDirs(filePaths) { return dirs; } -function cleanupPreviousData(db, getNodeIdStmt, isIncremental, changedFiles) { +function cleanupPreviousData( + db: BetterSqlite3Database, + getNodeIdStmt: NodeIdStmt, + isIncremental: boolean, + changedFiles: string[] | null, +): void { if (isIncremental) { - const affectedDirs = getAncestorDirs(changedFiles); + const affectedDirs = getAncestorDirs(changedFiles!); const deleteContainsForDir = db.prepare( "DELETE FROM edges WHERE kind = 'contains' AND source_id IN (SELECT id FROM nodes WHERE name = ? AND kind = 'directory')", ); @@ -31,7 +48,7 @@ function cleanupPreviousData(db, getNodeIdStmt, isIncremental, changedFiles) { for (const dir of affectedDirs) { deleteContainsForDir.run(dir); } - for (const f of changedFiles) { + for (const f of changedFiles!) 
{ const fileRow = getNodeIdStmt.get(f, 'file', f, 0); if (fileRow) deleteMetricForNode.run(fileRow.id); } @@ -50,8 +67,11 @@ function cleanupPreviousData(db, getNodeIdStmt, isIncremental, changedFiles) { } } -function collectAllDirectories(directories, fileSymbols) { - const allDirs = new Set(); +function collectAllDirectories( + directories: Set | Iterable, + fileSymbols: Map, +): Set { + const allDirs = new Set(); for (const dir of directories) { let d = dir; while (d && d !== '.') { @@ -69,9 +89,20 @@ function collectAllDirectories(directories, fileSymbols) { return allDirs; } -function insertContainsEdges(db, insertEdge, getNodeIdStmt, fileSymbols, allDirs, changedFiles) { +interface SqliteStatement { + run(...params: unknown[]): unknown; +} + +function insertContainsEdges( + db: BetterSqlite3Database, + insertEdge: SqliteStatement, + getNodeIdStmt: NodeIdStmt, + fileSymbols: Map, + allDirs: Set, + changedFiles: string[] | null, +): void { const isIncremental = changedFiles != null && changedFiles.length > 0; - const affectedDirs = isIncremental ? getAncestorDirs(changedFiles) : null; + const affectedDirs = isIncremental ? getAncestorDirs(changedFiles!) 
: null; db.transaction(() => { for (const relPath of fileSymbols.keys()) { @@ -97,9 +128,18 @@ function insertContainsEdges(db, insertEdge, getNodeIdStmt, fileSymbols, allDirs })(); } -function computeImportEdgeMaps(db) { - const fanInMap = new Map(); - const fanOutMap = new Map(); +interface ImportEdge { + source_file: string; + target_file: string; +} + +function computeImportEdgeMaps(db: BetterSqlite3Database): { + fanInMap: Map; + fanOutMap: Map; + importEdges: ImportEdge[]; +} { + const fanInMap = new Map(); + const fanOutMap = new Map(); const importEdges = db .prepare(` SELECT n1.file AS source_file, n2.file AS target_file @@ -109,7 +149,7 @@ function computeImportEdgeMaps(db) { WHERE e.kind IN ('imports', 'imports-type') AND n1.file != n2.file `) - .all(); + .all() as ImportEdge[]; for (const { source_file, target_file } of importEdges) { fanOutMap.set(source_file, (fanOutMap.get(source_file) || 0) + 1); @@ -119,21 +159,21 @@ function computeImportEdgeMaps(db) { } function computeFileMetrics( - db, - upsertMetric, - getNodeIdStmt, - fileSymbols, - lineCountMap, - fanInMap, - fanOutMap, -) { + db: BetterSqlite3Database, + upsertMetric: SqliteStatement, + getNodeIdStmt: NodeIdStmt, + fileSymbols: Map, + lineCountMap: Map, + fanInMap: Map, + fanOutMap: Map, +): void { db.transaction(() => { for (const [relPath, symbols] of fileSymbols) { const fileRow = getNodeIdStmt.get(relPath, 'file', relPath, 0); if (!fileRow) continue; const lineCount = lineCountMap.get(relPath) || 0; - const seen = new Set(); + const seen = new Set(); let symbolCount = 0; for (const d of symbols.definitions) { const key = `${d.name}|${d.kind}|${d.line}`; @@ -163,14 +203,14 @@ function computeFileMetrics( } function computeDirectoryMetrics( - db, - upsertMetric, - getNodeIdStmt, - fileSymbols, - allDirs, - importEdges, -) { - const dirFiles = new Map(); + db: BetterSqlite3Database, + upsertMetric: SqliteStatement, + getNodeIdStmt: NodeIdStmt, + fileSymbols: Map, + allDirs: Set, + 
importEdges: ImportEdge[], +): void { + const dirFiles = new Map(); for (const dir of allDirs) { dirFiles.set(dir, []); } @@ -178,21 +218,21 @@ function computeDirectoryMetrics( let d = normalizePath(path.dirname(relPath)); while (d && d !== '.') { if (dirFiles.has(d)) { - dirFiles.get(d).push(relPath); + dirFiles.get(d)!.push(relPath); } d = normalizePath(path.dirname(d)); } } - const fileToAncestorDirs = new Map(); + const fileToAncestorDirs = new Map>(); for (const [dir, files] of dirFiles) { for (const f of files) { if (!fileToAncestorDirs.has(f)) fileToAncestorDirs.set(f, new Set()); - fileToAncestorDirs.get(f).add(dir); + fileToAncestorDirs.get(f)!.add(dir); } } - const dirEdgeCounts = new Map(); + const dirEdgeCounts = new Map(); for (const dir of allDirs) { dirEdgeCounts.set(dir, { intra: 0, fanIn: 0, fanOut: 0 }); } @@ -233,7 +273,7 @@ function computeDirectoryMetrics( for (const f of files) { const sym = fileSymbols.get(f); if (sym) { - const seen = new Set(); + const seen = new Set(); for (const d of sym.definitions) { const key = `${d.name}|${d.kind}|${d.line}`; if (!seen.has(key)) { @@ -265,22 +305,19 @@ function computeDirectoryMetrics( // ─── Build-time: insert directory nodes, contains edges, and metrics ──── -/** - * Build directory structure nodes, containment edges, and compute metrics. - * Called from builder.js after edge building. 
- * - * @param {import('better-sqlite3').Database} db - Open read-write database - * @param {Map} fileSymbols - Map of relPath → { definitions, imports, exports, calls } - * @param {string} rootDir - Absolute root directory - * @param {Map} lineCountMap - Map of relPath → line count - * @param {Set} directories - Set of relative directory paths - */ -export function buildStructure(db, fileSymbols, _rootDir, lineCountMap, directories, changedFiles) { +export function buildStructure( + db: BetterSqlite3Database, + fileSymbols: Map, + _rootDir: string, + lineCountMap: Map, + directories: Set, + changedFiles?: string[] | null, +): void { const insertNode = db.prepare( 'INSERT OR IGNORE INTO nodes (name, kind, file, line, end_line) VALUES (?, ?, ?, ?, ?)', ); - const getNodeIdStmt = { - get: (name, kind, file, line) => { + const getNodeIdStmt: NodeIdStmt = { + get: (name: string, kind: string, file: string, line: number) => { const id = getNodeId(db, name, kind, file, line); return id != null ? { id } : undefined; }, @@ -296,7 +333,7 @@ export function buildStructure(db, fileSymbols, _rootDir, lineCountMap, director const isIncremental = changedFiles != null && changedFiles.length > 0; - cleanupPreviousData(db, getNodeIdStmt, isIncremental, changedFiles); + cleanupPreviousData(db, getNodeIdStmt, isIncremental, changedFiles ?? null); const allDirs = collectAllDirectories(directories, fileSymbols); @@ -306,7 +343,7 @@ export function buildStructure(db, fileSymbols, _rootDir, lineCountMap, director } })(); - insertContainsEdges(db, insertEdge, getNodeIdStmt, fileSymbols, allDirs, changedFiles); + insertContainsEdges(db, insertEdge, getNodeIdStmt, fileSymbols, allDirs, changedFiles ?? 
null); const { fanInMap, fanOutMap, importEdges } = computeImportEdgeMaps(db); @@ -332,7 +369,22 @@ export { FRAMEWORK_ENTRY_PREFIXES } from '../graph/classifiers/roles.js'; import { classifyRoles } from '../graph/classifiers/roles.js'; -export function classifyNodeRoles(db) { +interface RoleSummary { + entry: number; + core: number; + utility: number; + adapter: number; + dead: number; + 'dead-leaf': number; + 'dead-entry': number; + 'dead-ffi': number; + 'dead-unresolved': number; + 'test-only': number; + leaf: number; + [key: string]: number; +} + +export function classifyNodeRoles(db: BetterSqlite3Database): RoleSummary { const rows = db .prepare( `SELECT n.id, n.name, n.kind, n.file, @@ -347,7 +399,14 @@ export function classifyNodeRoles(db) { ) fo ON n.id = fo.source_id WHERE n.kind NOT IN ('file', 'directory')`, ) - .all(); + .all() as { + id: number; + name: string; + kind: string; + file: string; + fan_in: number; + fan_out: number; + }[]; if (rows.length === 0) { return { @@ -366,20 +425,21 @@ export function classifyNodeRoles(db) { } const exportedIds = new Set( - db - .prepare( - `SELECT DISTINCT e.target_id + ( + db + .prepare( + `SELECT DISTINCT e.target_id FROM edges e JOIN nodes caller ON e.source_id = caller.id JOIN nodes target ON e.target_id = target.id WHERE e.kind = 'calls' AND caller.file != target.file`, - ) - .all() - .map((r) => r.target_id), + ) + .all() as { target_id: number }[] + ).map((r) => r.target_id), ); // Compute production fan-in (excluding callers in test files) - const prodFanInMap = new Map(); + const prodFanInMap = new Map(); const prodRows = db .prepare( `SELECT e.target_id, COUNT(*) AS cnt @@ -389,7 +449,7 @@ export function classifyNodeRoles(db) { ${testFilterSQL('caller.file')} GROUP BY e.target_id`, ) - .all(); + .all() as { target_id: number; cnt: number }[]; for (const r of prodRows) { prodFanInMap.set(r.target_id, r.cnt); } @@ -409,7 +469,7 @@ export function classifyNodeRoles(db) { const roleMap = 
classifyRoles(classifierInput); // Build summary and updates - const summary = { + const summary: RoleSummary = { entry: 0, core: 0, utility: 0, @@ -422,7 +482,7 @@ export function classifyNodeRoles(db) { 'test-only': 0, leaf: 0, }; - const updates = []; + const updates: { id: number; role: string }[] = []; for (const row of rows) { const role = roleMap.get(String(row.id)) || 'leaf'; updates.push({ id: row.id, role }); @@ -445,10 +505,67 @@ export function classifyNodeRoles(db) { // ─── Query functions (read-only) ────────────────────────────────────── -/** - * Return hierarchical directory tree with metrics. - */ -export function structureData(customDbPath, opts = {}) { +interface DirRow { + id: number; + name: string; + file: string; + symbol_count: number | null; + fan_in: number | null; + fan_out: number | null; + cohesion: number | null; + file_count: number | null; +} + +interface FileMetricRow { + name: string; + line_count: number | null; + symbol_count: number | null; + import_count: number | null; + export_count: number | null; + fan_in: number | null; + fan_out: number | null; +} + +interface StructureDataOpts { + directory?: string; + depth?: number; + sort?: string; + noTests?: boolean; + full?: boolean; + fileLimit?: number; + limit?: number; + offset?: number; +} + +interface DirectoryEntry { + directory: string; + fileCount: number; + symbolCount: number; + fanIn: number; + fanOut: number; + cohesion: number | null; + density: number; + files: { + file: string; + lineCount: number; + symbolCount: number; + importCount: number; + exportCount: number; + fanIn: number; + fanOut: number; + }[]; + subdirectories: string[]; +} + +export function structureData( + customDbPath?: string, + opts: StructureDataOpts = {}, +): { + directories: DirectoryEntry[]; + count: number; + suppressed?: number; + warning?: string; +} { const db = openReadonlyOrFail(customDbPath); try { const rawDir = opts.directory || null; @@ -467,7 +584,7 @@ export function 
structureData(customDbPath, opts = {}) { LEFT JOIN node_metrics nm ON n.id = nm.node_id WHERE n.kind = 'directory' `) - .all(); + .all() as DirRow[]; if (filterDir) { const norm = normalizePath(filterDir); @@ -487,7 +604,7 @@ export function structureData(customDbPath, opts = {}) { dirs.sort(sortFn); // Get file metrics for each directory - const result = dirs.map((d) => { + const result: DirectoryEntry[] = dirs.map((d) => { let files = db .prepare(` SELECT n.name, nm.line_count, nm.symbol_count, nm.import_count, nm.export_count, nm.fan_in, nm.fan_out @@ -496,7 +613,7 @@ export function structureData(customDbPath, opts = {}) { LEFT JOIN node_metrics nm ON n.id = nm.node_id WHERE e.source_id = ? AND e.kind = 'contains' AND n.kind = 'file' `) - .all(d.id); + .all(d.id) as FileMetricRow[]; if (noTests) files = files.filter((f) => !isTestFile(f.name)); const subdirs = db @@ -506,7 +623,7 @@ export function structureData(customDbPath, opts = {}) { JOIN nodes n ON e.target_id = n.id WHERE e.source_id = ? AND e.kind = 'contains' AND n.kind = 'directory' `) - .all(d.id); + .all(d.id) as { name: string }[]; const fileCount = noTests ? files.length : d.file_count || 0; return { @@ -563,10 +680,36 @@ export function structureData(customDbPath, opts = {}) { } } -/** - * Return top N files or directories ranked by a chosen metric. 
- */ -export function hotspotsData(customDbPath, opts = {}) { +interface HotspotRow { + name: string; + kind: string; + line_count: number | null; + symbol_count: number | null; + import_count: number | null; + export_count: number | null; + fan_in: number | null; + fan_out: number | null; + cohesion: number | null; + file_count: number | null; +} + +interface HotspotsDataOpts { + metric?: string; + level?: string; + limit?: number; + offset?: number; + noTests?: boolean; +} + +export function hotspotsData( + customDbPath?: string, + opts: HotspotsDataOpts = {}, +): { + metric: string; + level: string; + limit: number; + hotspots: unknown[]; +} { const db = openReadonlyOrFail(customDbPath); try { const metric = opts.metric || 'fan-in'; @@ -578,7 +721,7 @@ export function hotspotsData(customDbPath, opts = {}) { const testFilter = testFilterSQL('n.name', noTests && kind === 'file'); - const HOTSPOT_QUERIES = { + const HOTSPOT_QUERIES: Record = { 'fan-in': db.prepare(` SELECT n.name, n.kind, nm.line_count, nm.symbol_count, nm.import_count, nm.export_count, nm.fan_in, nm.fan_out, nm.cohesion, nm.file_count @@ -601,8 +744,8 @@ export function hotspotsData(customDbPath, opts = {}) { WHERE n.kind = ? ${testFilter} ORDER BY (COALESCE(nm.fan_in, 0) + COALESCE(nm.fan_out, 0)) DESC NULLS LAST LIMIT ?`), }; - const stmt = HOTSPOT_QUERIES[metric] || HOTSPOT_QUERIES['fan-in']; - const rows = stmt.all(kind, limit); + const stmt = HOTSPOT_QUERIES[metric] ?? HOTSPOT_QUERIES['fan-in']!; + const rows = stmt!.all(kind, limit); const hotspots = rows.map((r) => ({ name: r.name, @@ -616,10 +759,10 @@ export function hotspotsData(customDbPath, opts = {}) { cohesion: r.cohesion, fileCount: r.file_count, density: - r.file_count > 0 - ? (r.symbol_count || 0) / r.file_count - : r.line_count > 0 - ? (r.symbol_count || 0) / r.line_count + (r.file_count ?? 0) > 0 + ? (r.symbol_count || 0) / r.file_count! + : (r.line_count ?? 0) > 0 + ? (r.symbol_count || 0) / r.line_count! 
: 0, coupling: (r.fan_in || 0) + (r.fan_out || 0), })); @@ -631,14 +774,35 @@ export function hotspotsData(customDbPath, opts = {}) { } } -/** - * Return directories with cohesion above threshold, with top exports/imports. - */ -export function moduleBoundariesData(customDbPath, opts = {}) { +interface ModuleBoundariesOpts { + threshold?: number; + config?: CodegraphConfig; +} + +export function moduleBoundariesData( + customDbPath?: string, + opts: ModuleBoundariesOpts = {}, +): { + threshold: number; + modules: { + directory: string; + cohesion: number | null; + fileCount: number; + symbolCount: number; + fanIn: number; + fanOut: number; + files: string[]; + }[]; + count: number; +} { const db = openReadonlyOrFail(customDbPath); try { const config = opts.config || loadConfig(); - const threshold = opts.threshold ?? config.structure?.cohesionThreshold ?? 0.3; + const threshold = + opts.threshold ?? + (config as unknown as { structure?: { cohesionThreshold?: number } }).structure + ?.cohesionThreshold ?? + 0.3; const dirs = db .prepare(` @@ -648,18 +812,27 @@ export function moduleBoundariesData(customDbPath, opts = {}) { WHERE n.kind = 'directory' AND nm.cohesion IS NOT NULL AND nm.cohesion >= ? ORDER BY nm.cohesion DESC `) - .all(threshold); + .all(threshold) as { + id: number; + name: string; + symbol_count: number | null; + fan_in: number | null; + fan_out: number | null; + cohesion: number | null; + file_count: number | null; + }[]; const modules = dirs.map((d) => { // Get files inside this directory - const files = db - .prepare(` + const files = ( + db + .prepare(` SELECT n.name FROM edges e JOIN nodes n ON e.target_id = n.id WHERE e.source_id = ? 
AND e.kind = 'contains' AND n.kind = 'file' `) - .all(d.id) - .map((f) => f.name); + .all(d.id) as { name: string }[] + ).map((f) => f.name); return { directory: d.name, @@ -680,7 +853,7 @@ export function moduleBoundariesData(customDbPath, opts = {}) { // ─── Helpers ────────────────────────────────────────────────────────── -function getSortFn(sortBy) { +function getSortFn(sortBy: string): (a: DirRow, b: DirRow) => number { switch (sortBy) { case 'cohesion': return (a, b) => (b.cohesion ?? -1) - (a.cohesion ?? -1); @@ -690,8 +863,8 @@ function getSortFn(sortBy) { return (a, b) => (b.fan_out || 0) - (a.fan_out || 0); case 'density': return (a, b) => { - const da = a.file_count > 0 ? (a.symbol_count || 0) / a.file_count : 0; - const db_ = b.file_count > 0 ? (b.symbol_count || 0) / b.file_count : 0; + const da = (a.file_count ?? 0) > 0 ? (a.symbol_count || 0) / a.file_count! : 0; + const db_ = (b.file_count ?? 0) > 0 ? (b.symbol_count || 0) / b.file_count! : 0; return db_ - da; }; default: diff --git a/src/features/triage.js b/src/features/triage.ts similarity index 58% rename from src/features/triage.js rename to src/features/triage.ts index 7b7d3604..2eea8ffa 100644 --- a/src/features/triage.js +++ b/src/features/triage.ts @@ -1,13 +1,33 @@ -import { openRepo } from '../db/index.js'; +import { openRepo, type Repository } from '../db/index.js'; +import type { RiskResult, RiskWeights } from '../graph/classifiers/risk.js'; import { DEFAULT_WEIGHTS, scoreRisk } from '../graph/classifiers/risk.js'; import { loadConfig } from '../infrastructure/config.js'; import { warn } from '../infrastructure/logger.js'; import { isTestFile } from '../infrastructure/test-filter.js'; import { paginateResult } from '../shared/paginate.js'; +import type { CodegraphConfig, TriageNodeRow } from '../types.js'; // ─── Scoring ───────────────────────────────────────────────────────── -const SORT_FNS = { +interface TriageItem { + name: string; + kind: string; + file: string; + line: number; + 
role: string | null; + fanIn: number; + cognitive: number; + churn: number; + maintainabilityIndex: number; + normFanIn: number; + normComplexity: number; + normChurn: number; + normMI: number; + roleWeight: number; + riskScore: number; +} + +const SORT_FNS: Record number> = { risk: (a, b) => b.riskScore - a.riskScore, complexity: (a, b) => b.cognitive - a.cognitive, churn: (a, b) => b.churn - a.churn, @@ -15,13 +35,7 @@ const SORT_FNS = { mi: (a, b) => a.maintainabilityIndex - b.maintainabilityIndex, }; -/** - * Build scored triage items from raw rows and risk metrics. - * @param {object[]} rows - Raw DB rows - * @param {object[]} riskMetrics - Per-row risk metric objects from scoreRisk - * @returns {object[]} - */ -function buildTriageItems(rows, riskMetrics) { +function buildTriageItems(rows: TriageNodeRow[], riskMetrics: RiskResult[]): TriageItem[] { return rows.map((r, i) => ({ name: r.name, kind: r.kind, @@ -32,23 +46,29 @@ function buildTriageItems(rows, riskMetrics) { cognitive: r.cognitive, churn: r.churn, maintainabilityIndex: r.mi, - normFanIn: riskMetrics[i].normFanIn, - normComplexity: riskMetrics[i].normComplexity, - normChurn: riskMetrics[i].normChurn, - normMI: riskMetrics[i].normMI, - roleWeight: riskMetrics[i].roleWeight, - riskScore: riskMetrics[i].riskScore, + normFanIn: riskMetrics[i]!.normFanIn, + normComplexity: riskMetrics[i]!.normComplexity, + normChurn: riskMetrics[i]!.normChurn, + normMI: riskMetrics[i]!.normMI, + roleWeight: riskMetrics[i]!.roleWeight, + riskScore: riskMetrics[i]!.riskScore, })); } -/** - * Compute signal coverage and summary statistics. 
- * @param {object[]} filtered - All filtered rows - * @param {object[]} scored - Scored and filtered items - * @param {object} weights - Active weights - * @returns {object} - */ -function computeTriageSummary(filtered, scored, weights) { +interface TriageSummary { + total: number; + analyzed: number; + avgScore: number; + maxScore: number; + weights: RiskWeights; + signalCoverage: Record; +} + +function computeTriageSummary( + filtered: TriageNodeRow[], + scored: TriageItem[], + weights: RiskWeights, +): TriageSummary { const signalCoverage = { complexity: round4(filtered.filter((r) => r.cognitive > 0).length / filtered.length), churn: round4(filtered.filter((r) => r.churn > 0).length / filtered.length), @@ -73,7 +93,7 @@ function computeTriageSummary(filtered, scored, weights) { // ─── Data Function ──────────────────────────────────────────────────── -const EMPTY_SUMMARY = (weights) => ({ +const EMPTY_SUMMARY = (weights: RiskWeights): TriageSummary => ({ total: 0, analyzed: 0, avgScore: 0, @@ -82,37 +102,55 @@ const EMPTY_SUMMARY = (weights) => ({ signalCoverage: {}, }); -/** - * Compute composite risk scores for all symbols. - * - * @param {string} [customDbPath] - Path to graph.db - * @param {object} [opts] - * @returns {{ items: object[], summary: object, _pagination?: object }} - */ -export function triageData(customDbPath, opts = {}) { +interface TriageDataOpts { + noTests?: boolean; + minScore?: number | string | null; + sort?: string; + config?: CodegraphConfig; + weights?: Partial; + file?: string; + kind?: string; + role?: string; + limit?: number; + offset?: number; + repo?: Repository; +} + +export function triageData( + customDbPath?: string, + opts: TriageDataOpts = {}, +): { items: TriageItem[]; summary: TriageSummary } { const { repo, close } = openRepo(customDbPath, opts); try { const noTests = opts.noTests || false; const minScore = opts.minScore != null ? 
Number(opts.minScore) : null; const sort = opts.sort || 'risk'; const config = opts.config || loadConfig(); - const riskConfig = config.risk || {}; - const weights = { ...DEFAULT_WEIGHTS, ...(riskConfig.weights || {}), ...(opts.weights || {}) }; + const riskConfig = ((config as unknown as Record)['risk'] || {}) as { + weights?: Partial; + roleWeights?: Record; + defaultRoleWeight?: number; + }; + const weights: RiskWeights = { + ...DEFAULT_WEIGHTS, + ...(riskConfig.weights || {}), + ...(opts.weights || {}), + }; const riskOpts = { roleWeights: riskConfig.roleWeights, defaultRoleWeight: riskConfig.defaultRoleWeight, }; - let rows; + let rows: TriageNodeRow[]; try { rows = repo.findNodesForTriage({ noTests, - file: opts.file || null, - kind: opts.kind || null, - role: opts.role || null, + file: opts.file || undefined, + kind: opts.kind || undefined, + role: (opts.role || undefined) as TriageDataOpts['role'] & undefined, }); - } catch (err) { - warn(`triage query failed: ${err.message}`); + } catch (err: unknown) { + warn(`triage query failed: ${(err as Error).message}`); return { items: [], summary: EMPTY_SUMMARY(weights) }; } @@ -125,7 +163,7 @@ export function triageData(customDbPath, opts = {}) { const items = buildTriageItems(filtered, riskMetrics); const scored = minScore != null ? 
items.filter((it) => it.riskScore >= minScore) : items; - scored.sort(SORT_FNS[sort] || SORT_FNS.risk); + scored.sort(SORT_FNS[sort] || SORT_FNS['risk']!); const result = { items: scored, @@ -143,6 +181,6 @@ export function triageData(customDbPath, opts = {}) { // ─── Utilities ──────────────────────────────────────────────────────── -function round4(n) { +function round4(n: number): number { return Math.round(n * 10000) / 10000; } From 77248be0ff75b813eeacce821a7ff7461e4fbb85 Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Mon, 23 Mar 2026 20:29:01 -0600 Subject: [PATCH 03/26] feat(types): migrate presentation/ and mcp/tools/ to TypeScript (Phase 6c) Migrate all 28 presentation/ files and all 36 mcp/tools/ files to TypeScript: presentation/: - 14 formatters (audit, batch, branch-compare, brief, cfg, check, cochange, communities, complexity, dataflow, export, flow, manifesto, owners) - queries-cli/ barrel + 6 sub-modules (exports, impact, inspect, overview, path) - sequence, sequence-renderer, structure, triage, viewer - result-formatter, query barrel mcp/tools/: - 35 individual tool handlers + barrel index - Add McpToolHandler interface and McpToolContext to server.ts All exports typed with explicit parameter and return types. Update ROADMAP progress to 178/283 (~63%). 
Impact: 1040 functions changed, 48 affected --- docs/roadmap/ROADMAP.md | 8 +- src/mcp/server.ts | 11 + src/mcp/tools/{ast-query.js => ast-query.ts} | 13 +- src/mcp/tools/{audit.js => audit.ts} | 16 +- src/mcp/tools/batch-query.js | 11 - src/mcp/tools/batch-query.ts | 22 ++ .../{branch-compare.js => branch-compare.ts} | 12 +- src/mcp/tools/brief.js | 8 - src/mcp/tools/brief.ts | 15 ++ src/mcp/tools/{cfg.js => cfg.ts} | 15 +- src/mcp/tools/{check.js => check.ts} | 23 +- .../tools/{co-changes.js => co-changes.ts} | 11 +- src/mcp/tools/code-owners.js | 12 - src/mcp/tools/code-owners.ts | 22 ++ .../tools/{communities.js => communities.ts} | 12 +- .../tools/{complexity.js => complexity.ts} | 16 +- src/mcp/tools/{context.js => context.ts} | 15 +- src/mcp/tools/{dataflow.js => dataflow.ts} | 18 +- .../tools/{diff-impact.js => diff-impact.ts} | 13 +- .../{execution-flow.js => execution-flow.ts} | 14 +- .../{export-graph.js => export-graph.ts} | 16 +- src/mcp/tools/{file-deps.js => file-deps.ts} | 10 +- .../{file-exports.js => file-exports.ts} | 11 +- .../tools/{find-cycles.js => find-cycles.ts} | 6 +- src/mcp/tools/{fn-impact.js => fn-impact.ts} | 13 +- src/mcp/tools/impact-analysis.js | 8 - src/mcp/tools/impact-analysis.ts | 15 ++ ...{implementations.js => implementations.ts} | 12 +- src/mcp/tools/{index.js => index.ts} | 10 +- .../tools/{interfaces.js => interfaces.ts} | 12 +- .../{list-functions.js => list-functions.ts} | 11 +- src/mcp/tools/list-repos.js | 11 - src/mcp/tools/list-repos.ts | 17 ++ src/mcp/tools/module-map.js | 6 - src/mcp/tools/module-map.ts | 13 + .../tools/{node-roles.js => node-roles.ts} | 11 +- src/mcp/tools/path.js | 12 - src/mcp/tools/path.ts | 24 ++ src/mcp/tools/{query.js => query.ts} | 21 +- ...{semantic-search.js => semantic-search.ts} | 15 +- src/mcp/tools/{sequence.js => sequence.ts} | 17 +- src/mcp/tools/{structure.js => structure.ts} | 12 +- src/mcp/tools/symbol-children.js | 14 - src/mcp/tools/symbol-children.ts | 24 ++ 
src/mcp/tools/{triage.js => triage.ts} | 22 +- src/mcp/tools/{where.js => where.ts} | 11 +- src/presentation/{audit.js => audit.ts} | 27 +- src/presentation/batch.js | 26 -- src/presentation/batch.ts | 49 ++++ .../{branch-compare.js => branch-compare.ts} | 69 ++++- src/presentation/{brief.js => brief.ts} | 40 ++- src/presentation/{cfg.js => cfg.ts} | 42 ++- src/presentation/{check.js => check.ts} | 51 +++- src/presentation/{cochange.js => cochange.ts} | 40 ++- .../{communities.js => communities.ts} | 50 +++- .../{complexity.js => complexity.ts} | 53 +++- src/presentation/dataflow.js | 110 -------- src/presentation/dataflow.ts | 173 ++++++++++++ src/presentation/{export.js => export.ts} | 255 +++++++++++------- src/presentation/{flow.js => flow.ts} | 36 ++- .../{manifesto.js => manifesto.ts} | 33 ++- src/presentation/{owners.js => owners.ts} | 46 +++- .../{queries-cli.js => queries-cli.ts} | 0 .../queries-cli/{exports.js => exports.ts} | 57 +++- .../queries-cli/{impact.js => impact.ts} | 162 +++++++++-- .../queries-cli/{index.js => index.ts} | 0 .../queries-cli/{inspect.js => inspect.ts} | 237 ++++++++++++++-- .../queries-cli/{overview.js => overview.ts} | 179 ++++++++++-- .../queries-cli/{path.js => path.ts} | 62 ++++- src/presentation/{query.js => query.ts} | 4 - ...esult-formatter.js => result-formatter.ts} | 87 +++--- src/presentation/sequence-renderer.js | 43 --- src/presentation/sequence-renderer.ts | 46 ++++ src/presentation/{sequence.js => sequence.ts} | 21 +- .../{structure.js => structure.ts} | 66 ++++- src/presentation/{triage.js => triage.ts} | 51 +++- src/presentation/{viewer.js => viewer.ts} | 150 +++++++---- 77 files changed, 2197 insertions(+), 709 deletions(-) rename src/mcp/tools/{ast-query.js => ast-query.ts} (52%) rename src/mcp/tools/{audit.js => audit.ts} (56%) delete mode 100644 src/mcp/tools/batch-query.js create mode 100644 src/mcp/tools/batch-query.ts rename src/mcp/tools/{branch-compare.js => branch-compare.ts} (55%) delete mode 100644 
src/mcp/tools/brief.js create mode 100644 src/mcp/tools/brief.ts rename src/mcp/tools/{cfg.js => cfg.ts} (57%) rename src/mcp/tools/{check.js => check.ts} (65%) rename src/mcp/tools/{co-changes.js => co-changes.ts} (69%) delete mode 100644 src/mcp/tools/code-owners.js create mode 100644 src/mcp/tools/code-owners.ts rename src/mcp/tools/{communities.js => communities.ts} (58%) rename src/mcp/tools/{complexity.js => complexity.ts} (56%) rename src/mcp/tools/{context.js => context.ts} (55%) rename src/mcp/tools/{dataflow.js => dataflow.ts} (58%) rename src/mcp/tools/{diff-impact.js => diff-impact.ts} (67%) rename src/mcp/tools/{execution-flow.js => execution-flow.ts} (69%) rename src/mcp/tools/{export-graph.js => export-graph.ts} (79%) rename src/mcp/tools/{file-deps.js => file-deps.ts} (55%) rename src/mcp/tools/{file-exports.js => file-exports.ts} (55%) rename src/mcp/tools/{find-cycles.js => find-cycles.ts} (66%) rename src/mcp/tools/{fn-impact.js => fn-impact.ts} (55%) delete mode 100644 src/mcp/tools/impact-analysis.js create mode 100644 src/mcp/tools/impact-analysis.ts rename src/mcp/tools/{implementations.js => implementations.ts} (55%) rename src/mcp/tools/{index.js => index.ts} (89%) rename src/mcp/tools/{interfaces.js => interfaces.ts} (55%) rename src/mcp/tools/{list-functions.js => list-functions.ts} (56%) delete mode 100644 src/mcp/tools/list-repos.js create mode 100644 src/mcp/tools/list-repos.ts delete mode 100644 src/mcp/tools/module-map.js create mode 100644 src/mcp/tools/module-map.ts rename src/mcp/tools/{node-roles.js => node-roles.ts} (55%) delete mode 100644 src/mcp/tools/path.js create mode 100644 src/mcp/tools/path.ts rename src/mcp/tools/{query.js => query.ts} (60%) rename src/mcp/tools/{semantic-search.js => semantic-search.ts} (78%) rename src/mcp/tools/{sequence.js => sequence.ts} (54%) rename src/mcp/tools/{structure.js => structure.ts} (57%) delete mode 100644 src/mcp/tools/symbol-children.js create mode 100644 
src/mcp/tools/symbol-children.ts rename src/mcp/tools/{triage.js => triage.ts} (60%) rename src/mcp/tools/{where.js => where.ts} (55%) rename src/presentation/{audit.js => audit.ts} (84%) delete mode 100644 src/presentation/batch.js create mode 100644 src/presentation/batch.ts rename src/presentation/{branch-compare.js => branch-compare.ts} (64%) rename src/presentation/{brief.js => brief.ts} (67%) rename src/presentation/{cfg.js => cfg.ts} (66%) rename src/presentation/{check.js => check.ts} (72%) rename src/presentation/{cochange.js => cochange.ts} (58%) rename src/presentation/{communities.js => communities.ts} (69%) rename src/presentation/{complexity.js => complexity.ts} (76%) delete mode 100644 src/presentation/dataflow.js create mode 100644 src/presentation/dataflow.ts rename src/presentation/{export.js => export.ts} (71%) rename src/presentation/{flow.js => flow.ts} (68%) rename src/presentation/{manifesto.js => manifesto.ts} (75%) rename src/presentation/{owners.js => owners.ts} (59%) rename src/presentation/{queries-cli.js => queries-cli.ts} (100%) rename src/presentation/queries-cli/{exports.js => exports.ts} (70%) rename src/presentation/queries-cli/{impact.js => impact.ts} (62%) rename src/presentation/queries-cli/{index.js => index.ts} (100%) rename src/presentation/queries-cli/{inspect.js => inspect.ts} (66%) rename src/presentation/queries-cli/{overview.js => overview.ts} (60%) rename src/presentation/queries-cli/{path.js => path.ts} (54%) rename src/presentation/{query.js => query.ts} (62%) rename src/presentation/{result-formatter.js => result-formatter.ts} (57%) delete mode 100644 src/presentation/sequence-renderer.js create mode 100644 src/presentation/sequence-renderer.ts rename src/presentation/{sequence.js => sequence.ts} (64%) rename src/presentation/{structure.js => structure.ts} (63%) rename src/presentation/{triage.js => triage.ts} (58%) rename src/presentation/{viewer.js => viewer.ts} (85%) diff --git a/docs/roadmap/ROADMAP.md 
b/docs/roadmap/ROADMAP.md index b95ab37a..093b01e7 100644 --- a/docs/roadmap/ROADMAP.md +++ b/docs/roadmap/ROADMAP.md @@ -1080,7 +1080,7 @@ npm workspaces (`package.json` `workspaces`), `pnpm-workspace.yaml`, and `lerna. ## Phase 5 -- TypeScript Migration -> **Status:** In Progress — 114 of 283 source files migrated (~40%), 169 `.js` files remaining +> **Status:** In Progress — 178 of 283 source files migrated (~63%), 105 `.js` files remaining **Goal:** Migrate the codebase from plain JavaScript to TypeScript, leveraging the clean module boundaries established in Phase 3. Incremental module-by-module migration starting from leaf modules inward. @@ -1151,15 +1151,13 @@ Migrate modules that implement domain logic and Phase 3 interfaces. Some migrate Migrate top-level orchestration, features, and entry points. Some migrated via [#555](https://github.com/optave/codegraph/pull/555), 141 files remaining. -**Migrated:** `domain/graph/builder.ts` + `context.ts` + `helpers.ts` + `pipeline.ts`, `domain/graph/watcher.ts`, `domain/search/{generator,index,models}.ts`, `mcp/{index,middleware,server,tool-registry}.ts`, `features/export.ts`, `index.ts`, `ast-analysis/` (all 18 files), `features/` (all 20 files — ast, audit, batch, boundaries, branch-compare, cfg, check, cochange, communities, complexity, dataflow, flow, graph-enrichment, manifesto, owners, sequence, snapshot, structure, triage, shared/find-nodes) +**Migrated:** `domain/graph/builder.ts` + `context.ts` + `helpers.ts` + `pipeline.ts`, `domain/graph/watcher.ts`, `domain/search/{generator,index,models}.ts`, `mcp/{index,middleware,server,tool-registry}.ts`, `features/export.ts`, `index.ts`, `ast-analysis/` (all 18 files), `features/` (all 20 files), `presentation/` (all 28 files), `mcp/tools/` (all 36 files) -**Remaining (121):** +**Remaining (57):** | Module | Files | Notes | |--------|-------|-------| | `cli.js` + `cli/` | 55 | Commander entry point, 43 command handlers (`commands/`), barrel, shared CLI utilities 
| -| `mcp/tools/` | 36 | Individual MCP tool handlers + barrel | -| `presentation/` | 28 | Presentation formatters (14 files), `queries-cli/` (7 files), sequence-renderer, viewer, export, etc. | | `index.js` | 1 | Public API exports (stale — `.ts` exists) | **Stale `.js` counterparts to delete (13 files):** `domain/graph/builder.js`, `domain/graph/builder/{context,helpers,pipeline}.js`, `domain/graph/watcher.js`, `domain/search/{generator,index,models}.js`, `features/export.js`, `mcp/{index,middleware,server,tool-registry}.js` — these have `.ts` counterparts already diff --git a/src/mcp/server.ts b/src/mcp/server.ts index a5f98ef2..da9bd5b7 100644 --- a/src/mcp/server.ts +++ b/src/mcp/server.ts @@ -19,6 +19,17 @@ import { initMcpDefaults } from './middleware.js'; import { buildToolList } from './tool-registry.js'; import { TOOL_HANDLERS } from './tools/index.js'; +export interface McpToolContext { + dbPath: string; + // biome-ignore lint/suspicious/noExplicitAny: lazy-loaded queries module + getQueries(): Promise; + // biome-ignore lint/suspicious/noExplicitAny: lazy-loaded better-sqlite3 constructor + getDatabase(): any; + findDbPath: typeof findDbPath; + allowedRepos: string[] | undefined; + MCP_MAX_LIMIT: number; +} + interface MCPServerOptionsInternal extends MCPServerOptions { config?: CodegraphConfig; } diff --git a/src/mcp/tools/ast-query.js b/src/mcp/tools/ast-query.ts similarity index 52% rename from src/mcp/tools/ast-query.js rename to src/mcp/tools/ast-query.ts index a353e7ea..c90adca7 100644 --- a/src/mcp/tools/ast-query.js +++ b/src/mcp/tools/ast-query.ts @@ -1,8 +1,19 @@ +import type { ASTNodeKind } from '../../types.js'; import { effectiveLimit, effectiveOffset } from '../middleware.js'; +import type { McpToolContext } from '../server.js'; export const name = 'ast_query'; -export async function handler(args, ctx) { +interface AstQueryArgs { + pattern?: string; + kind?: ASTNodeKind; + file?: string; + no_tests?: boolean; + limit?: number; + offset?: 
number; +} + +export async function handler(args: AstQueryArgs, ctx: McpToolContext): Promise { const { astQueryData } = await import('../../features/ast.js'); return astQueryData(args.pattern, ctx.dbPath, { kind: args.kind, diff --git a/src/mcp/tools/audit.js b/src/mcp/tools/audit.ts similarity index 56% rename from src/mcp/tools/audit.js rename to src/mcp/tools/audit.ts index c4438ba1..1d5a54b5 100644 --- a/src/mcp/tools/audit.js +++ b/src/mcp/tools/audit.ts @@ -1,13 +1,25 @@ import { effectiveOffset, MCP_DEFAULTS, MCP_MAX_LIMIT } from '../middleware.js'; +import type { McpToolContext } from '../server.js'; export const name = 'audit'; -export async function handler(args, ctx) { +interface AuditArgs { + target: string; + quick?: boolean; + depth?: number; + file?: string; + kind?: string; + no_tests?: boolean; + limit?: number; + offset?: number; +} + +export async function handler(args: AuditArgs, ctx: McpToolContext): Promise { if (args.quick) { const { explainData } = await ctx.getQueries(); return explainData(args.target, ctx.dbPath, { noTests: args.no_tests, - limit: Math.min(args.limit ?? MCP_DEFAULTS.explain, MCP_MAX_LIMIT), + limit: Math.min(args.limit ?? MCP_DEFAULTS['explain'] ?? 
100, MCP_MAX_LIMIT), offset: effectiveOffset(args), }); } diff --git a/src/mcp/tools/batch-query.js b/src/mcp/tools/batch-query.js deleted file mode 100644 index 01dc19ba..00000000 --- a/src/mcp/tools/batch-query.js +++ /dev/null @@ -1,11 +0,0 @@ -export const name = 'batch_query'; - -export async function handler(args, ctx) { - const { batchData } = await import('../../features/batch.js'); - return batchData(args.command, args.targets, ctx.dbPath, { - depth: args.depth, - file: args.file, - kind: args.kind, - noTests: args.no_tests, - }); -} diff --git a/src/mcp/tools/batch-query.ts b/src/mcp/tools/batch-query.ts new file mode 100644 index 00000000..57978125 --- /dev/null +++ b/src/mcp/tools/batch-query.ts @@ -0,0 +1,22 @@ +import type { McpToolContext } from '../server.js'; + +export const name = 'batch_query'; + +interface BatchQueryArgs { + command: string; + targets: string[]; + depth?: number; + file?: string; + kind?: string; + no_tests?: boolean; +} + +export async function handler(args: BatchQueryArgs, ctx: McpToolContext): Promise { + const { batchData } = await import('../../features/batch.js'); + return batchData(args.command, args.targets, ctx.dbPath, { + depth: args.depth, + file: args.file, + kind: args.kind, + noTests: args.no_tests, + }); +} diff --git a/src/mcp/tools/branch-compare.js b/src/mcp/tools/branch-compare.ts similarity index 55% rename from src/mcp/tools/branch-compare.js rename to src/mcp/tools/branch-compare.ts index a01022dd..f7e0f5f3 100644 --- a/src/mcp/tools/branch-compare.js +++ b/src/mcp/tools/branch-compare.ts @@ -1,6 +1,16 @@ +import type { McpToolContext } from '../server.js'; + export const name = 'branch_compare'; -export async function handler(args, _ctx) { +interface BranchCompareArgs { + base: string; + target: string; + depth?: number; + no_tests?: boolean; + format?: 'json' | 'mermaid'; +} + +export async function handler(args: BranchCompareArgs, _ctx: McpToolContext): Promise { const { branchCompareData, 
branchCompareMermaid } = await import( '../../features/branch-compare.js' ); diff --git a/src/mcp/tools/brief.js b/src/mcp/tools/brief.js deleted file mode 100644 index c3c800d5..00000000 --- a/src/mcp/tools/brief.js +++ /dev/null @@ -1,8 +0,0 @@ -export const name = 'brief'; - -export async function handler(args, ctx) { - const { briefData } = await ctx.getQueries(); - return briefData(args.file, ctx.dbPath, { - noTests: args.no_tests, - }); -} diff --git a/src/mcp/tools/brief.ts b/src/mcp/tools/brief.ts new file mode 100644 index 00000000..5814d990 --- /dev/null +++ b/src/mcp/tools/brief.ts @@ -0,0 +1,15 @@ +import type { McpToolContext } from '../server.js'; + +export const name = 'brief'; + +interface BriefArgs { + file: string; + no_tests?: boolean; +} + +export async function handler(args: BriefArgs, ctx: McpToolContext): Promise { + const { briefData } = await ctx.getQueries(); + return briefData(args.file, ctx.dbPath, { + noTests: args.no_tests, + }); +} diff --git a/src/mcp/tools/cfg.js b/src/mcp/tools/cfg.ts similarity index 57% rename from src/mcp/tools/cfg.js rename to src/mcp/tools/cfg.ts index bb77c455..2ac4f7df 100644 --- a/src/mcp/tools/cfg.js +++ b/src/mcp/tools/cfg.ts @@ -1,14 +1,25 @@ import { effectiveOffset, MCP_DEFAULTS } from '../middleware.js'; +import type { McpToolContext } from '../server.js'; export const name = 'cfg'; -export async function handler(args, ctx) { +interface CfgArgs { + name: string; + format?: 'json' | 'dot' | 'mermaid'; + file?: string; + kind?: string; + no_tests?: boolean; + limit?: number; + offset?: number; +} + +export async function handler(args: CfgArgs, ctx: McpToolContext): Promise { const { cfgData, cfgToDOT, cfgToMermaid } = await import('../../features/cfg.js'); const cfgResult = cfgData(args.name, ctx.dbPath, { file: args.file, kind: args.kind, noTests: args.no_tests, - limit: Math.min(args.limit ?? MCP_DEFAULTS.query, ctx.MCP_MAX_LIMIT), + limit: Math.min(args.limit ?? MCP_DEFAULTS['query'] ?? 
100, ctx.MCP_MAX_LIMIT), offset: effectiveOffset(args), }); if (args.format === 'dot') { diff --git a/src/mcp/tools/check.js b/src/mcp/tools/check.ts similarity index 65% rename from src/mcp/tools/check.js rename to src/mcp/tools/check.ts index e1e587c6..b477a240 100644 --- a/src/mcp/tools/check.js +++ b/src/mcp/tools/check.ts @@ -1,8 +1,25 @@ import { effectiveOffset, MCP_DEFAULTS, MCP_MAX_LIMIT } from '../middleware.js'; +import type { McpToolContext } from '../server.js'; export const name = 'check'; -export async function handler(args, ctx) { +interface CheckArgs { + ref?: string; + staged?: boolean; + rules?: boolean; + cycles?: boolean; + blast_radius?: number; + signatures?: boolean; + boundaries?: boolean; + depth?: number; + file?: string; + kind?: string; + no_tests?: boolean; + limit?: number; + offset?: number; +} + +export async function handler(args: CheckArgs, ctx: McpToolContext): Promise { const isDiffMode = args.ref || args.staged; if (!isDiffMode && !args.rules) { @@ -11,7 +28,7 @@ export async function handler(args, ctx) { file: args.file, noTests: args.no_tests, kind: args.kind, - limit: Math.min(args.limit ?? MCP_DEFAULTS.manifesto, MCP_MAX_LIMIT), + limit: Math.min(args.limit ?? MCP_DEFAULTS['manifesto'] ?? 100, MCP_MAX_LIMIT), offset: effectiveOffset(args), }); } @@ -34,7 +51,7 @@ export async function handler(args, ctx) { file: args.file, noTests: args.no_tests, kind: args.kind, - limit: Math.min(args.limit ?? MCP_DEFAULTS.manifesto, MCP_MAX_LIMIT), + limit: Math.min(args.limit ?? MCP_DEFAULTS['manifesto'] ?? 
100, MCP_MAX_LIMIT), offset: effectiveOffset(args), }); return { check: checkResult, manifesto: manifestoResult }; diff --git a/src/mcp/tools/co-changes.js b/src/mcp/tools/co-changes.ts similarity index 69% rename from src/mcp/tools/co-changes.js rename to src/mcp/tools/co-changes.ts index 32efa4ac..3c0d284e 100644 --- a/src/mcp/tools/co-changes.js +++ b/src/mcp/tools/co-changes.ts @@ -1,8 +1,17 @@ import { effectiveLimit, effectiveOffset } from '../middleware.js'; +import type { McpToolContext } from '../server.js'; export const name = 'co_changes'; -export async function handler(args, ctx) { +interface CoChangesArgs { + file?: string; + limit?: number; + offset?: number; + min_jaccard?: number; + no_tests?: boolean; +} + +export async function handler(args: CoChangesArgs, ctx: McpToolContext): Promise { const { coChangeData, coChangeTopData } = await import('../../features/cochange.js'); return args.file ? coChangeData(args.file, ctx.dbPath, { diff --git a/src/mcp/tools/code-owners.js b/src/mcp/tools/code-owners.js deleted file mode 100644 index 7fe97d7d..00000000 --- a/src/mcp/tools/code-owners.js +++ /dev/null @@ -1,12 +0,0 @@ -export const name = 'code_owners'; - -export async function handler(args, ctx) { - const { ownersData } = await import('../../features/owners.js'); - return ownersData(ctx.dbPath, { - file: args.file, - owner: args.owner, - boundary: args.boundary, - kind: args.kind, - noTests: args.no_tests, - }); -} diff --git a/src/mcp/tools/code-owners.ts b/src/mcp/tools/code-owners.ts new file mode 100644 index 00000000..54d5513b --- /dev/null +++ b/src/mcp/tools/code-owners.ts @@ -0,0 +1,22 @@ +import type { McpToolContext } from '../server.js'; + +export const name = 'code_owners'; + +interface CodeOwnersArgs { + file?: string; + owner?: string; + boundary?: boolean; + kind?: string; + no_tests?: boolean; +} + +export async function handler(args: CodeOwnersArgs, ctx: McpToolContext): Promise { + const { ownersData } = await 
import('../../features/owners.js'); + return ownersData(ctx.dbPath, { + file: args.file, + owner: args.owner, + boundary: args.boundary, + kind: args.kind, + noTests: args.no_tests, + }); +} diff --git a/src/mcp/tools/communities.js b/src/mcp/tools/communities.ts similarity index 58% rename from src/mcp/tools/communities.js rename to src/mcp/tools/communities.ts index 1ec66a0b..6a4923d4 100644 --- a/src/mcp/tools/communities.js +++ b/src/mcp/tools/communities.ts @@ -1,8 +1,18 @@ import { effectiveLimit, effectiveOffset } from '../middleware.js'; +import type { McpToolContext } from '../server.js'; export const name = 'communities'; -export async function handler(args, ctx) { +interface CommunitiesArgs { + functions?: boolean; + resolution?: number; + drift?: boolean; + no_tests?: boolean; + limit?: number; + offset?: number; +} + +export async function handler(args: CommunitiesArgs, ctx: McpToolContext): Promise { const { communitiesData } = await import('../../features/communities.js'); return communitiesData(ctx.dbPath, { functions: args.functions, diff --git a/src/mcp/tools/complexity.js b/src/mcp/tools/complexity.ts similarity index 56% rename from src/mcp/tools/complexity.js rename to src/mcp/tools/complexity.ts index 3c999773..e6559fee 100644 --- a/src/mcp/tools/complexity.js +++ b/src/mcp/tools/complexity.ts @@ -1,8 +1,21 @@ import { effectiveLimit, effectiveOffset } from '../middleware.js'; +import type { McpToolContext } from '../server.js'; export const name = 'complexity'; -export async function handler(args, ctx) { +interface ComplexityArgs { + name?: string; + file?: string; + limit?: number; + offset?: number; + sort?: string; + above_threshold?: boolean; + health?: boolean; + no_tests?: boolean; + kind?: string; +} + +export async function handler(args: ComplexityArgs, ctx: McpToolContext): Promise { const { complexityData } = await import('../../features/complexity.js'); return complexityData(ctx.dbPath, { target: args.name, @@ -11,7 +24,6 @@ export 
async function handler(args, ctx) { offset: effectiveOffset(args), sort: args.sort, aboveThreshold: args.above_threshold, - health: args.health, noTests: args.no_tests, kind: args.kind, }); diff --git a/src/mcp/tools/context.js b/src/mcp/tools/context.ts similarity index 55% rename from src/mcp/tools/context.js rename to src/mcp/tools/context.ts index d0f461db..0bed230e 100644 --- a/src/mcp/tools/context.js +++ b/src/mcp/tools/context.ts @@ -1,8 +1,21 @@ import { effectiveLimit, effectiveOffset } from '../middleware.js'; +import type { McpToolContext } from '../server.js'; export const name = 'context'; -export async function handler(args, ctx) { +interface ContextArgs { + name: string; + depth?: number; + file?: string; + kind?: string; + no_source?: boolean; + no_tests?: boolean; + include_tests?: boolean; + limit?: number; + offset?: number; +} + +export async function handler(args: ContextArgs, ctx: McpToolContext): Promise { const { contextData } = await ctx.getQueries(); return contextData(args.name, ctx.dbPath, { depth: args.depth, diff --git a/src/mcp/tools/dataflow.js b/src/mcp/tools/dataflow.ts similarity index 58% rename from src/mcp/tools/dataflow.js rename to src/mcp/tools/dataflow.ts index 39d84db9..3c6b0d03 100644 --- a/src/mcp/tools/dataflow.js +++ b/src/mcp/tools/dataflow.ts @@ -1,8 +1,20 @@ import { effectiveOffset, MCP_DEFAULTS } from '../middleware.js'; +import type { McpToolContext } from '../server.js'; export const name = 'dataflow'; -export async function handler(args, ctx) { +interface DataflowArgs { + mode?: string; + name: string; + depth?: number; + file?: string; + kind?: string; + no_tests?: boolean; + limit?: number; + offset?: number; +} + +export async function handler(args: DataflowArgs, ctx: McpToolContext): Promise { const dfMode = args.mode || 'edges'; if (dfMode === 'impact') { const { dataflowImpactData } = await import('../../features/dataflow.js'); @@ -11,7 +23,7 @@ export async function handler(args, ctx) { file: args.file, 
kind: args.kind, noTests: args.no_tests, - limit: Math.min(args.limit ?? MCP_DEFAULTS.fn_impact, ctx.MCP_MAX_LIMIT), + limit: Math.min(args.limit ?? MCP_DEFAULTS['fn_impact'] ?? 100, ctx.MCP_MAX_LIMIT), offset: effectiveOffset(args), }); } @@ -20,7 +32,7 @@ export async function handler(args, ctx) { file: args.file, kind: args.kind, noTests: args.no_tests, - limit: Math.min(args.limit ?? MCP_DEFAULTS.query, ctx.MCP_MAX_LIMIT), + limit: Math.min(args.limit ?? MCP_DEFAULTS['query'] ?? 100, ctx.MCP_MAX_LIMIT), offset: effectiveOffset(args), }); } diff --git a/src/mcp/tools/diff-impact.js b/src/mcp/tools/diff-impact.ts similarity index 67% rename from src/mcp/tools/diff-impact.js rename to src/mcp/tools/diff-impact.ts index c3e50219..9852eec7 100644 --- a/src/mcp/tools/diff-impact.js +++ b/src/mcp/tools/diff-impact.ts @@ -1,8 +1,19 @@ import { effectiveLimit, effectiveOffset } from '../middleware.js'; +import type { McpToolContext } from '../server.js'; export const name = 'diff_impact'; -export async function handler(args, ctx) { +interface DiffImpactArgs { + format?: string; + staged?: boolean; + ref?: string; + depth?: number; + no_tests?: boolean; + limit?: number; + offset?: number; +} + +export async function handler(args: DiffImpactArgs, ctx: McpToolContext): Promise { if (args.format === 'mermaid') { const { diffImpactMermaid } = await ctx.getQueries(); return diffImpactMermaid(ctx.dbPath, { diff --git a/src/mcp/tools/execution-flow.js b/src/mcp/tools/execution-flow.ts similarity index 69% rename from src/mcp/tools/execution-flow.js rename to src/mcp/tools/execution-flow.ts index d8fefb1c..db61d099 100644 --- a/src/mcp/tools/execution-flow.js +++ b/src/mcp/tools/execution-flow.ts @@ -1,8 +1,20 @@ import { effectiveLimit, effectiveOffset } from '../middleware.js'; +import type { McpToolContext } from '../server.js'; export const name = 'execution_flow'; -export async function handler(args, ctx) { +interface ExecutionFlowArgs { + list?: boolean; + name?: string; 
+ depth?: number; + file?: string; + kind?: string; + no_tests?: boolean; + limit?: number; + offset?: number; +} + +export async function handler(args: ExecutionFlowArgs, ctx: McpToolContext): Promise { if (args.list) { const { listEntryPointsData } = await import('../../features/flow.js'); return listEntryPointsData(ctx.dbPath, { diff --git a/src/mcp/tools/export-graph.js b/src/mcp/tools/export-graph.ts similarity index 79% rename from src/mcp/tools/export-graph.js rename to src/mcp/tools/export-graph.ts index e632086f..958c61f1 100644 --- a/src/mcp/tools/export-graph.js +++ b/src/mcp/tools/export-graph.ts @@ -1,17 +1,27 @@ import { findDbPath } from '../../db/index.js'; import { effectiveOffset, MCP_DEFAULTS, MCP_MAX_LIMIT } from '../middleware.js'; +import type { McpToolContext } from '../server.js'; export const name = 'export_graph'; -export async function handler(args, ctx) { +interface ExportGraphArgs { + format: string; + file_level?: boolean; + limit?: number; + offset?: number; +} + +export async function handler(args: ExportGraphArgs, ctx: McpToolContext): Promise { const { exportDOT, exportGraphML, exportGraphSON, exportJSON, exportMermaid, exportNeo4jCSV } = await import('../../features/export.js'); const Database = ctx.getDatabase(); const db = new Database(findDbPath(ctx.dbPath), { readonly: true }); const fileLevel = args.file_level !== false; - const exportLimit = args.limit ? Math.min(args.limit, MCP_MAX_LIMIT) : MCP_DEFAULTS.export_graph; + const exportLimit = args.limit + ? Math.min(args.limit, MCP_MAX_LIMIT) + : (MCP_DEFAULTS['export_graph'] ?? 
500); - let result; + let result: unknown; try { switch (args.format) { case 'dot': diff --git a/src/mcp/tools/file-deps.js b/src/mcp/tools/file-deps.ts similarity index 55% rename from src/mcp/tools/file-deps.js rename to src/mcp/tools/file-deps.ts index 7bf2f1c8..82ae0292 100644 --- a/src/mcp/tools/file-deps.js +++ b/src/mcp/tools/file-deps.ts @@ -1,8 +1,16 @@ import { effectiveLimit, effectiveOffset } from '../middleware.js'; +import type { McpToolContext } from '../server.js'; export const name = 'file_deps'; -export async function handler(args, ctx) { +interface FileDepsArgs { + file: string; + no_tests?: boolean; + limit?: number; + offset?: number; +} + +export async function handler(args: FileDepsArgs, ctx: McpToolContext): Promise { const { fileDepsData } = await ctx.getQueries(); return fileDepsData(args.file, ctx.dbPath, { noTests: args.no_tests, diff --git a/src/mcp/tools/file-exports.js b/src/mcp/tools/file-exports.ts similarity index 55% rename from src/mcp/tools/file-exports.js rename to src/mcp/tools/file-exports.ts index 5c769136..a1738af6 100644 --- a/src/mcp/tools/file-exports.js +++ b/src/mcp/tools/file-exports.ts @@ -1,8 +1,17 @@ import { effectiveLimit, effectiveOffset } from '../middleware.js'; +import type { McpToolContext } from '../server.js'; export const name = 'file_exports'; -export async function handler(args, ctx) { +interface FileExportsArgs { + file: string; + no_tests?: boolean; + unused?: boolean; + limit?: number; + offset?: number; +} + +export async function handler(args: FileExportsArgs, ctx: McpToolContext): Promise { const { exportsData } = await ctx.getQueries(); return exportsData(args.file, ctx.dbPath, { noTests: args.no_tests, diff --git a/src/mcp/tools/find-cycles.js b/src/mcp/tools/find-cycles.ts similarity index 66% rename from src/mcp/tools/find-cycles.js rename to src/mcp/tools/find-cycles.ts index 6a24f581..7a3b1c12 100644 --- a/src/mcp/tools/find-cycles.js +++ b/src/mcp/tools/find-cycles.ts @@ -1,9 +1,13 @@ 
import { findDbPath } from '../../db/index.js'; import { findCycles } from '../../domain/graph/cycles.js'; +import type { McpToolContext } from '../server.js'; export const name = 'find_cycles'; -export async function handler(_args, ctx) { +export async function handler( + _args: Record, + ctx: McpToolContext, +): Promise<{ cycles: string[][]; count: number }> { const Database = ctx.getDatabase(); const db = new Database(findDbPath(ctx.dbPath), { readonly: true }); try { diff --git a/src/mcp/tools/fn-impact.js b/src/mcp/tools/fn-impact.ts similarity index 55% rename from src/mcp/tools/fn-impact.js rename to src/mcp/tools/fn-impact.ts index fb41b664..71c24fe7 100644 --- a/src/mcp/tools/fn-impact.js +++ b/src/mcp/tools/fn-impact.ts @@ -1,8 +1,19 @@ import { effectiveLimit, effectiveOffset } from '../middleware.js'; +import type { McpToolContext } from '../server.js'; export const name = 'fn_impact'; -export async function handler(args, ctx) { +interface FnImpactArgs { + name: string; + depth?: number; + file?: string; + kind?: string; + no_tests?: boolean; + limit?: number; + offset?: number; +} + +export async function handler(args: FnImpactArgs, ctx: McpToolContext): Promise { const { fnImpactData } = await ctx.getQueries(); return fnImpactData(args.name, ctx.dbPath, { depth: args.depth, diff --git a/src/mcp/tools/impact-analysis.js b/src/mcp/tools/impact-analysis.js deleted file mode 100644 index 10e6b957..00000000 --- a/src/mcp/tools/impact-analysis.js +++ /dev/null @@ -1,8 +0,0 @@ -export const name = 'impact_analysis'; - -export async function handler(args, ctx) { - const { impactAnalysisData } = await ctx.getQueries(); - return impactAnalysisData(args.file, ctx.dbPath, { - noTests: args.no_tests, - }); -} diff --git a/src/mcp/tools/impact-analysis.ts b/src/mcp/tools/impact-analysis.ts new file mode 100644 index 00000000..f667ff4d --- /dev/null +++ b/src/mcp/tools/impact-analysis.ts @@ -0,0 +1,15 @@ +import type { McpToolContext } from '../server.js'; + +export 
const name = 'impact_analysis'; + +interface ImpactAnalysisArgs { + file: string; + no_tests?: boolean; +} + +export async function handler(args: ImpactAnalysisArgs, ctx: McpToolContext): Promise { + const { impactAnalysisData } = await ctx.getQueries(); + return impactAnalysisData(args.file, ctx.dbPath, { + noTests: args.no_tests, + }); +} diff --git a/src/mcp/tools/implementations.js b/src/mcp/tools/implementations.ts similarity index 55% rename from src/mcp/tools/implementations.js rename to src/mcp/tools/implementations.ts index cad07967..34071f0b 100644 --- a/src/mcp/tools/implementations.js +++ b/src/mcp/tools/implementations.ts @@ -1,8 +1,18 @@ import { effectiveLimit, effectiveOffset } from '../middleware.js'; +import type { McpToolContext } from '../server.js'; export const name = 'implementations'; -export async function handler(args, ctx) { +interface ImplementationsArgs { + name: string; + file?: string; + kind?: string; + no_tests?: boolean; + limit?: number; + offset?: number; +} + +export async function handler(args: ImplementationsArgs, ctx: McpToolContext): Promise { const { implementationsData } = await ctx.getQueries(); return implementationsData(args.name, ctx.dbPath, { file: args.file, diff --git a/src/mcp/tools/index.js b/src/mcp/tools/index.ts similarity index 89% rename from src/mcp/tools/index.js rename to src/mcp/tools/index.ts index 65c0f0d8..d57412a1 100644 --- a/src/mcp/tools/index.js +++ b/src/mcp/tools/index.ts @@ -2,6 +2,14 @@ * Barrel module — registers all MCP tool handlers. 
*/ +import type { McpToolContext } from '../server.js'; + +export interface McpToolHandler { + name: string; + // biome-ignore lint/suspicious/noExplicitAny: tool arg types vary per handler + handler(args: any, ctx: McpToolContext): Promise; +} + import * as astQuery from './ast-query.js'; import * as audit from './audit.js'; import * as batchQuery from './batch-query.js'; @@ -38,7 +46,7 @@ import * as symbolChildren from './symbol-children.js'; import * as triage from './triage.js'; import * as where from './where.js'; -export const TOOL_HANDLERS = new Map([ +export const TOOL_HANDLERS = new Map([ [query.name, query], [path.name, path], [fileDeps.name, fileDeps], diff --git a/src/mcp/tools/interfaces.js b/src/mcp/tools/interfaces.ts similarity index 55% rename from src/mcp/tools/interfaces.js rename to src/mcp/tools/interfaces.ts index 518925b3..6dd4da6f 100644 --- a/src/mcp/tools/interfaces.js +++ b/src/mcp/tools/interfaces.ts @@ -1,8 +1,18 @@ import { effectiveLimit, effectiveOffset } from '../middleware.js'; +import type { McpToolContext } from '../server.js'; export const name = 'interfaces'; -export async function handler(args, ctx) { +interface InterfacesArgs { + name: string; + file?: string; + kind?: string; + no_tests?: boolean; + limit?: number; + offset?: number; +} + +export async function handler(args: InterfacesArgs, ctx: McpToolContext): Promise { const { interfacesData } = await ctx.getQueries(); return interfacesData(args.name, ctx.dbPath, { file: args.file, diff --git a/src/mcp/tools/list-functions.js b/src/mcp/tools/list-functions.ts similarity index 56% rename from src/mcp/tools/list-functions.js rename to src/mcp/tools/list-functions.ts index ef518cdf..267557e8 100644 --- a/src/mcp/tools/list-functions.js +++ b/src/mcp/tools/list-functions.ts @@ -1,8 +1,17 @@ import { effectiveLimit, effectiveOffset } from '../middleware.js'; +import type { McpToolContext } from '../server.js'; export const name = 'list_functions'; -export async function 
handler(args, ctx) { +interface ListFunctionsArgs { + file?: string; + pattern?: string; + no_tests?: boolean; + limit?: number; + offset?: number; +} + +export async function handler(args: ListFunctionsArgs, ctx: McpToolContext): Promise { const { listFunctionsData } = await ctx.getQueries(); return listFunctionsData(ctx.dbPath, { file: args.file, diff --git a/src/mcp/tools/list-repos.js b/src/mcp/tools/list-repos.js deleted file mode 100644 index 1cd18d3d..00000000 --- a/src/mcp/tools/list-repos.js +++ /dev/null @@ -1,11 +0,0 @@ -export const name = 'list_repos'; - -export async function handler(_args, ctx) { - const { listRepos, pruneRegistry } = await import('../../infrastructure/registry.js'); - pruneRegistry(); - let repos = listRepos(); - if (ctx.allowedRepos) { - repos = repos.filter((r) => ctx.allowedRepos.includes(r.name)); - } - return { repos }; -} diff --git a/src/mcp/tools/list-repos.ts b/src/mcp/tools/list-repos.ts new file mode 100644 index 00000000..471317cd --- /dev/null +++ b/src/mcp/tools/list-repos.ts @@ -0,0 +1,17 @@ +import type { McpToolContext } from '../server.js'; + +export const name = 'list_repos'; + +interface ListReposArgs { + [key: string]: unknown; +} + +export async function handler(_args: ListReposArgs, ctx: McpToolContext): Promise { + const { listRepos, pruneRegistry } = await import('../../infrastructure/registry.js'); + pruneRegistry(); + let repos = listRepos(); + if (ctx.allowedRepos) { + repos = repos.filter((r: { name: string }) => (ctx.allowedRepos as string[]).includes(r.name)); + } + return { repos }; +} diff --git a/src/mcp/tools/module-map.js b/src/mcp/tools/module-map.js deleted file mode 100644 index ae7f067a..00000000 --- a/src/mcp/tools/module-map.js +++ /dev/null @@ -1,6 +0,0 @@ -export const name = 'module_map'; - -export async function handler(args, ctx) { - const { moduleMapData } = await ctx.getQueries(); - return moduleMapData(ctx.dbPath, args.limit || 20, { noTests: args.no_tests }); -} diff --git 
a/src/mcp/tools/module-map.ts b/src/mcp/tools/module-map.ts new file mode 100644 index 00000000..4dd0af0c --- /dev/null +++ b/src/mcp/tools/module-map.ts @@ -0,0 +1,13 @@ +import type { McpToolContext } from '../server.js'; + +export const name = 'module_map'; + +interface ModuleMapArgs { + limit?: number; + no_tests?: boolean; +} + +export async function handler(args: ModuleMapArgs, ctx: McpToolContext): Promise { + const { moduleMapData } = await ctx.getQueries(); + return moduleMapData(ctx.dbPath, args.limit || 20, { noTests: args.no_tests }); +} diff --git a/src/mcp/tools/node-roles.js b/src/mcp/tools/node-roles.ts similarity index 55% rename from src/mcp/tools/node-roles.js rename to src/mcp/tools/node-roles.ts index ba754480..45f0c452 100644 --- a/src/mcp/tools/node-roles.js +++ b/src/mcp/tools/node-roles.ts @@ -1,8 +1,17 @@ import { effectiveLimit, effectiveOffset } from '../middleware.js'; +import type { McpToolContext } from '../server.js'; export const name = 'node_roles'; -export async function handler(args, ctx) { +interface NodeRolesArgs { + role?: string; + file?: string; + no_tests?: boolean; + limit?: number; + offset?: number; +} + +export async function handler(args: NodeRolesArgs, ctx: McpToolContext): Promise { const { rolesData } = await ctx.getQueries(); return rolesData(ctx.dbPath, { role: args.role, diff --git a/src/mcp/tools/path.js b/src/mcp/tools/path.js deleted file mode 100644 index 789c1508..00000000 --- a/src/mcp/tools/path.js +++ /dev/null @@ -1,12 +0,0 @@ -export const name = 'path'; - -export async function handler(args, ctx) { - const { pathData } = await ctx.getQueries(); - return pathData(args.from, args.to, ctx.dbPath, { - maxDepth: args.depth ?? 
10, - edgeKinds: args.edge_kinds, - fromFile: args.from_file, - toFile: args.to_file, - noTests: args.no_tests, - }); -} diff --git a/src/mcp/tools/path.ts b/src/mcp/tools/path.ts new file mode 100644 index 00000000..7031fa4c --- /dev/null +++ b/src/mcp/tools/path.ts @@ -0,0 +1,24 @@ +import type { McpToolContext } from '../server.js'; + +export const name = 'path'; + +interface PathArgs { + from: string; + to: string; + depth?: number; + edge_kinds?: string[]; + from_file?: string; + to_file?: string; + no_tests?: boolean; +} + +export async function handler(args: PathArgs, ctx: McpToolContext): Promise { + const { pathData } = await ctx.getQueries(); + return pathData(args.from, args.to, ctx.dbPath, { + maxDepth: args.depth ?? 10, + edgeKinds: args.edge_kinds, + fromFile: args.from_file, + toFile: args.to_file, + noTests: args.no_tests, + }); +} diff --git a/src/mcp/tools/query.js b/src/mcp/tools/query.ts similarity index 60% rename from src/mcp/tools/query.js rename to src/mcp/tools/query.ts index a89bede2..dcf3eb30 100644 --- a/src/mcp/tools/query.js +++ b/src/mcp/tools/query.ts @@ -1,8 +1,25 @@ import { effectiveOffset, MCP_DEFAULTS } from '../middleware.js'; +import type { McpToolContext } from '../server.js'; export const name = 'query'; -export async function handler(args, ctx) { +interface QueryArgs { + name: string; + mode?: string; + to?: string; + depth?: number; + edge_kinds?: string[]; + reverse?: boolean; + from_file?: string; + to_file?: string; + file?: string; + kind?: string; + no_tests?: boolean; + limit?: number; + offset?: number; +} + +export async function handler(args: QueryArgs, ctx: McpToolContext): Promise { const { fnDepsData, pathData } = await ctx.getQueries(); const qMode = args.mode || 'deps'; if (qMode === 'path') { @@ -24,7 +41,7 @@ export async function handler(args, ctx) { file: args.file, kind: args.kind, noTests: args.no_tests, - limit: Math.min(args.limit ?? 
MCP_DEFAULTS.query, ctx.MCP_MAX_LIMIT), + limit: Math.min(args.limit ?? MCP_DEFAULTS['query'] ?? 100, ctx.MCP_MAX_LIMIT), offset: effectiveOffset(args), }); } diff --git a/src/mcp/tools/semantic-search.js b/src/mcp/tools/semantic-search.ts similarity index 78% rename from src/mcp/tools/semantic-search.js rename to src/mcp/tools/semantic-search.ts index 6d965b2c..57dbde1d 100644 --- a/src/mcp/tools/semantic-search.js +++ b/src/mcp/tools/semantic-search.ts @@ -1,11 +1,20 @@ import { effectiveOffset, MCP_DEFAULTS, MCP_MAX_LIMIT } from '../middleware.js'; +import type { McpToolContext } from '../server.js'; export const name = 'semantic_search'; -export async function handler(args, ctx) { +interface SemanticSearchArgs { + query: string; + mode?: string; + limit?: number; + offset?: number; + min_score?: number; +} + +export async function handler(args: SemanticSearchArgs, ctx: McpToolContext): Promise { const mode = args.mode || 'hybrid'; const searchOpts = { - limit: Math.min(args.limit ?? MCP_DEFAULTS.semantic_search, MCP_MAX_LIMIT), + limit: Math.min(args.limit ?? MCP_DEFAULTS['semantic_search'] ?? 
100, MCP_MAX_LIMIT), offset: effectiveOffset(args), minScore: args.min_score, }; @@ -46,7 +55,7 @@ export async function handler(args, ctx) { // hybrid (default) — falls back to semantic if no FTS5 const { hybridSearchData, searchData } = await import('../../domain/search/index.js'); - let result = await hybridSearchData(args.query, ctx.dbPath, searchOpts); + let result: unknown = await hybridSearchData(args.query, ctx.dbPath, searchOpts); if (result === null) { result = await searchData(args.query, ctx.dbPath, searchOpts); if (result === null) { diff --git a/src/mcp/tools/sequence.js b/src/mcp/tools/sequence.ts similarity index 54% rename from src/mcp/tools/sequence.js rename to src/mcp/tools/sequence.ts index 8209cb36..e0ba93ea 100644 --- a/src/mcp/tools/sequence.js +++ b/src/mcp/tools/sequence.ts @@ -1,8 +1,21 @@ import { effectiveOffset, MCP_DEFAULTS } from '../middleware.js'; +import type { McpToolContext } from '../server.js'; export const name = 'sequence'; -export async function handler(args, ctx) { +interface SequenceArgs { + name: string; + depth?: number; + file?: string; + kind?: string; + dataflow?: boolean; + no_tests?: boolean; + limit?: number; + offset?: number; + format?: string; +} + +export async function handler(args: SequenceArgs, ctx: McpToolContext): Promise { const { sequenceData, sequenceToMermaid } = await import('../../features/sequence.js'); const seqResult = sequenceData(args.name, ctx.dbPath, { depth: args.depth, @@ -10,7 +23,7 @@ export async function handler(args, ctx) { kind: args.kind, dataflow: args.dataflow, noTests: args.no_tests, - limit: Math.min(args.limit ?? MCP_DEFAULTS.execution_flow, ctx.MCP_MAX_LIMIT), + limit: Math.min(args.limit ?? MCP_DEFAULTS['execution_flow'] ?? 100, ctx.MCP_MAX_LIMIT), offset: effectiveOffset(args), }); return args.format === 'json' ? 
seqResult : { text: sequenceToMermaid(seqResult), ...seqResult }; diff --git a/src/mcp/tools/structure.js b/src/mcp/tools/structure.ts similarity index 57% rename from src/mcp/tools/structure.js rename to src/mcp/tools/structure.ts index 51dc453f..88326612 100644 --- a/src/mcp/tools/structure.js +++ b/src/mcp/tools/structure.ts @@ -1,8 +1,18 @@ import { effectiveLimit, effectiveOffset } from '../middleware.js'; +import type { McpToolContext } from '../server.js'; export const name = 'structure'; -export async function handler(args, ctx) { +interface StructureArgs { + directory?: string; + depth?: number; + sort?: string; + full?: boolean; + limit?: number; + offset?: number; +} + +export async function handler(args: StructureArgs, ctx: McpToolContext): Promise { const { structureData } = await import('../../features/structure.js'); return structureData(ctx.dbPath, { directory: args.directory, diff --git a/src/mcp/tools/symbol-children.js b/src/mcp/tools/symbol-children.js deleted file mode 100644 index 45effcb1..00000000 --- a/src/mcp/tools/symbol-children.js +++ /dev/null @@ -1,14 +0,0 @@ -import { effectiveOffset, MCP_DEFAULTS } from '../middleware.js'; - -export const name = 'symbol_children'; - -export async function handler(args, ctx) { - const { childrenData } = await ctx.getQueries(); - return childrenData(args.name, ctx.dbPath, { - file: args.file, - kind: args.kind, - noTests: args.no_tests, - limit: Math.min(args.limit ?? 
MCP_DEFAULTS.context, ctx.MCP_MAX_LIMIT), - offset: effectiveOffset(args), - }); -} diff --git a/src/mcp/tools/symbol-children.ts b/src/mcp/tools/symbol-children.ts new file mode 100644 index 00000000..466081d3 --- /dev/null +++ b/src/mcp/tools/symbol-children.ts @@ -0,0 +1,24 @@ +import { effectiveOffset, MCP_DEFAULTS } from '../middleware.js'; +import type { McpToolContext } from '../server.js'; + +export const name = 'symbol_children'; + +interface SymbolChildrenArgs { + name: string; + file?: string; + kind?: string; + no_tests?: boolean; + limit?: number; + offset?: number; +} + +export async function handler(args: SymbolChildrenArgs, ctx: McpToolContext): Promise { + const { childrenData } = await ctx.getQueries(); + return childrenData(args.name, ctx.dbPath, { + file: args.file, + kind: args.kind, + noTests: args.no_tests, + limit: Math.min(args.limit ?? MCP_DEFAULTS['context'] ?? 100, ctx.MCP_MAX_LIMIT), + offset: effectiveOffset(args), + }); +} diff --git a/src/mcp/tools/triage.js b/src/mcp/tools/triage.ts similarity index 60% rename from src/mcp/tools/triage.js rename to src/mcp/tools/triage.ts index b153a76b..fc73bd2a 100644 --- a/src/mcp/tools/triage.js +++ b/src/mcp/tools/triage.ts @@ -1,21 +1,35 @@ import { effectiveLimit, effectiveOffset, MCP_DEFAULTS, MCP_MAX_LIMIT } from '../middleware.js'; +import type { McpToolContext } from '../server.js'; export const name = 'triage'; -export async function handler(args, ctx) { +interface TriageArgs { + level?: string; + sort?: string; + min_score?: number; + role?: string; + file?: string; + kind?: string; + no_tests?: boolean; + weights?: Record; + limit?: number; + offset?: number; +} + +export async function handler(args: TriageArgs, ctx: McpToolContext): Promise { if (args.level === 'file' || args.level === 'directory') { const { hotspotsData } = await import('../../features/structure.js'); - const TRIAGE_TO_HOTSPOT = { + const TRIAGE_TO_HOTSPOT: Record = { risk: 'fan-in', complexity: 'density', churn: 
'coupling', mi: 'fan-in', }; - const metric = TRIAGE_TO_HOTSPOT[args.sort] ?? args.sort; + const metric = TRIAGE_TO_HOTSPOT[args.sort as string] ?? args.sort; return hotspotsData(ctx.dbPath, { metric, level: args.level, - limit: Math.min(args.limit ?? MCP_DEFAULTS.hotspots, MCP_MAX_LIMIT), + limit: Math.min(args.limit ?? MCP_DEFAULTS['hotspots'] ?? 100, MCP_MAX_LIMIT), offset: effectiveOffset(args), noTests: args.no_tests, }); diff --git a/src/mcp/tools/where.js b/src/mcp/tools/where.ts similarity index 55% rename from src/mcp/tools/where.js rename to src/mcp/tools/where.ts index 11f74203..4f514145 100644 --- a/src/mcp/tools/where.js +++ b/src/mcp/tools/where.ts @@ -1,8 +1,17 @@ import { effectiveLimit, effectiveOffset } from '../middleware.js'; +import type { McpToolContext } from '../server.js'; export const name = 'where'; -export async function handler(args, ctx) { +interface WhereArgs { + target: string; + file_mode?: string; + no_tests?: boolean; + limit?: number; + offset?: number; +} + +export async function handler(args: WhereArgs, ctx: McpToolContext): Promise { const { whereData } = await ctx.getQueries(); return whereData(args.target, ctx.dbPath, { file: args.file_mode, diff --git a/src/presentation/audit.js b/src/presentation/audit.ts similarity index 84% rename from src/presentation/audit.js rename to src/presentation/audit.ts index 13e4d488..d77a8fb1 100644 --- a/src/presentation/audit.js +++ b/src/presentation/audit.ts @@ -2,11 +2,24 @@ import { kindIcon } from '../domain/queries.js'; import { auditData } from '../features/audit.js'; import { outputResult } from '../infrastructure/result-formatter.js'; -/** - * CLI formatter for the audit command. 
- */ -export function audit(target, customDbPath, opts = {}) { - const data = auditData(target, customDbPath, opts); +interface AuditOpts { + json?: boolean; + ndjson?: boolean; + noTests?: boolean; + file?: string; + kind?: string; + quick?: boolean; + limit?: number; + offset?: number; +} + +export function audit( + target: string, + customDbPath: string | undefined, + opts: AuditOpts = {}, +): void { + // biome-ignore lint/suspicious/noExplicitAny: audit data shape is dynamic + const data: any = auditData(target, customDbPath, opts); if (outputResult(data, null, opts)) return; @@ -60,7 +73,9 @@ export function audit(target, customDbPath, opts = {}) { // Impact console.log(`\n Impact: ${fn.impact.totalDependents} transitive dependent(s)`); for (const [level, nodes] of Object.entries(fn.impact.levels)) { - console.log(` Level ${level}: ${nodes.map((n) => n.name).join(', ')}`); + console.log( + ` Level ${level}: ${(nodes as Array<{ name: string }>).map((n) => n.name).join(', ')}`, + ); } // Call edges diff --git a/src/presentation/batch.js b/src/presentation/batch.js deleted file mode 100644 index bec40cf9..00000000 --- a/src/presentation/batch.js +++ /dev/null @@ -1,26 +0,0 @@ -import { batchData, multiBatchData } from '../features/batch.js'; - -/** - * CLI wrapper — calls batchData and prints JSON to stdout. - */ -export function batch(command, targets, customDbPath, opts = {}) { - const data = batchData(command, targets, customDbPath, opts); - console.log(JSON.stringify(data, null, 2)); -} - -/** - * CLI wrapper for batch-query — detects multi-command mode (objects with .command) - * or falls back to single-command batchData (default: 'where'). 
- */ -export function batchQuery(targets, customDbPath, opts = {}) { - const { command: defaultCommand = 'where', ...rest } = opts; - const isMulti = targets.length > 0 && typeof targets[0] === 'object' && targets[0].command; - - let data; - if (isMulti) { - data = multiBatchData(targets, customDbPath, rest); - } else { - data = batchData(defaultCommand, targets, customDbPath, rest); - } - console.log(JSON.stringify(data, null, 2)); -} diff --git a/src/presentation/batch.ts b/src/presentation/batch.ts new file mode 100644 index 00000000..d073f5ef --- /dev/null +++ b/src/presentation/batch.ts @@ -0,0 +1,49 @@ +import { batchData, multiBatchData } from '../features/batch.js'; + +interface BatchOpts { + json?: boolean; + ndjson?: boolean; + noTests?: boolean; + file?: string; + kind?: string; + command?: string; + limit?: number; + offset?: number; + [key: string]: unknown; +} + +interface MultiBatchTarget { + command: string; + target: string; + [key: string]: unknown; +} + +export function batch( + command: string, + targets: string[], + customDbPath: string | undefined, + opts: BatchOpts = {}, +): void { + const data = batchData(command, targets, customDbPath, opts); + console.log(JSON.stringify(data, null, 2)); +} + +export function batchQuery( + targets: Array, + customDbPath: string | undefined, + opts: BatchOpts = {}, +): void { + const { command: defaultCommand = 'where', ...rest } = opts; + const isMulti = + targets.length > 0 && + typeof targets[0] === 'object' && + (targets[0] as MultiBatchTarget).command; + + let data: unknown; + if (isMulti) { + data = multiBatchData(targets as MultiBatchTarget[], customDbPath, rest); + } else { + data = batchData(defaultCommand, targets as string[], customDbPath, rest); + } + console.log(JSON.stringify(data, null, 2)); +} diff --git a/src/presentation/branch-compare.js b/src/presentation/branch-compare.ts similarity index 64% rename from src/presentation/branch-compare.js rename to src/presentation/branch-compare.ts 
index c90394fb..de01aeb8 100644 --- a/src/presentation/branch-compare.js +++ b/src/presentation/branch-compare.ts @@ -4,10 +4,42 @@ import { outputResult } from '../infrastructure/result-formatter.js'; // ─── Text Formatting ──────────────────────────────────────────────────── -function formatText(data) { +interface BranchCompareSymbol { + kind: string; + name: string; + file: string; + line: number; + impact?: unknown[]; + changes?: { lineCount: number; fanIn: number; fanOut: number }; + base?: { lineCount: number; fanIn: number; fanOut: number; line: number }; + target?: { lineCount: number; fanIn: number; fanOut: number }; +} + +interface BranchCompareSummary { + added: number; + removed: number; + changed: number; + totalImpacted: number; + filesAffected: number; +} + +interface BranchCompareFormatData { + error?: string; + baseSha: string; + targetSha: string; + baseRef: string; + targetRef: string; + changedFiles: string[]; + added: BranchCompareSymbol[]; + removed: BranchCompareSymbol[]; + changed: BranchCompareSymbol[]; + summary: BranchCompareSummary; +} + +function formatText(data: BranchCompareFormatData): string { if (data.error) return `Error: ${data.error}`; - const lines = []; + const lines: string[] = []; const shortBase = data.baseSha.slice(0, 7); const shortTarget = data.targetSha.slice(0, 7); @@ -45,19 +77,19 @@ function formatText(data) { ` ~ Changed (${data.changed.length} symbol${data.changed.length !== 1 ? 
's' : ''}):`, ); for (const sym of data.changed) { - const parts = []; - if (sym.changes.lineCount !== 0) { - parts.push(`lines: ${sym.base.lineCount} -> ${sym.target.lineCount}`); + const parts: string[] = []; + if (sym.changes?.lineCount !== 0) { + parts.push(`lines: ${sym.base?.lineCount} -> ${sym.target?.lineCount}`); } - if (sym.changes.fanIn !== 0) { - parts.push(`fan_in: ${sym.base.fanIn} -> ${sym.target.fanIn}`); + if (sym.changes?.fanIn !== 0) { + parts.push(`fan_in: ${sym.base?.fanIn} -> ${sym.target?.fanIn}`); } - if (sym.changes.fanOut !== 0) { - parts.push(`fan_out: ${sym.base.fanOut} -> ${sym.target.fanOut}`); + if (sym.changes?.fanOut !== 0) { + parts.push(`fan_out: ${sym.base?.fanOut} -> ${sym.target?.fanOut}`); } const detail = parts.length > 0 ? ` (${parts.join(', ')})` : ''; lines.push( - ` [${kindIcon(sym.kind)}] ${sym.name} -- ${sym.file}:${sym.base.line}${detail}`, + ` [${kindIcon(sym.kind)}] ${sym.name} -- ${sym.file}:${sym.base?.line}${detail}`, ); if (sym.impact && sym.impact.length > 0) { lines.push( @@ -82,7 +114,20 @@ function formatText(data) { // ─── CLI Display Function ─────────────────────────────────────────────── -export async function branchCompare(baseRef, targetRef, opts = {}) { +interface BranchCompareCliOpts { + json?: boolean; + ndjson?: boolean; + format?: string; + repoRoot?: string; + noTests?: boolean; + dbPath?: string; +} + +export async function branchCompare( + baseRef: string, + targetRef: string, + opts: BranchCompareCliOpts = {}, +): Promise { const data = await branchCompareData(baseRef, targetRef, opts); if (opts.format === 'json') opts = { ...opts, json: true }; @@ -93,5 +138,5 @@ export async function branchCompare(baseRef, targetRef, opts = {}) { return; } - console.log(formatText(data)); + console.log(formatText(data as unknown as BranchCompareFormatData)); } diff --git a/src/presentation/brief.js b/src/presentation/brief.ts similarity index 67% rename from src/presentation/brief.js rename to 
src/presentation/brief.ts index 17f94566..efec070a 100644 --- a/src/presentation/brief.js +++ b/src/presentation/brief.ts @@ -1,18 +1,30 @@ import { briefData } from '../domain/analysis/brief.js'; import { outputResult } from './result-formatter.js'; -/** - * Format a compact brief for hook context injection. - * Single-block, token-efficient output. - * - * Example: - * src/domain/graph/builder.js [HIGH RISK] - * Symbols: buildGraph [core, 12 callers], collectFiles [leaf, 2 callers] - * Imports: src/db/index.js, src/domain/parser.js - * Imported by: src/cli/commands/build.js (+8 transitive) - */ -export function brief(file, customDbPath, opts = {}) { - const data = briefData(file, customDbPath, opts); +interface BriefOpts { + json?: boolean; + ndjson?: boolean; + noTests?: boolean; + config?: unknown; +} + +interface BriefSymbol { + name: string; + role?: string; + callerCount: number; +} + +interface BriefResult { + file: string; + risk: string; + symbols: BriefSymbol[]; + imports: string[]; + importedBy: string[]; + totalImporterCount: number; +} + +export function brief(file: string, customDbPath: string | undefined, opts: BriefOpts = {}): void { + const data = briefData(file, customDbPath as string, opts); if (outputResult(data, 'results', opts)) return; if (data.results.length === 0) { @@ -20,13 +32,13 @@ export function brief(file, customDbPath, opts = {}) { return; } - for (const r of data.results) { + for (const r of data.results as BriefResult[]) { console.log(`${r.file} [${r.risk.toUpperCase()} RISK]`); // Symbols line if (r.symbols.length > 0) { const parts = r.symbols.map((s) => { - const tags = []; + const tags: string[] = []; if (s.role) tags.push(s.role); tags.push(`${s.callerCount} caller${s.callerCount !== 1 ? 
's' : ''}`); return `${s.name} [${tags.join(', ')}]`; diff --git a/src/presentation/cfg.js b/src/presentation/cfg.ts similarity index 66% rename from src/presentation/cfg.js rename to src/presentation/cfg.ts index 97b1e414..7970a413 100644 --- a/src/presentation/cfg.js +++ b/src/presentation/cfg.ts @@ -1,10 +1,42 @@ import { cfgData, cfgToDOT, cfgToMermaid } from '../features/cfg.js'; import { outputResult } from '../infrastructure/result-formatter.js'; -/** - * CLI display for cfg command. - */ -export function cfg(name, customDbPath, opts = {}) { +interface CfgCliOpts { + json?: boolean; + ndjson?: boolean; + noTests?: boolean; + file?: string; + kind?: string; + format?: string; + limit?: number; + offset?: number; +} + +interface CfgBlock { + index: number; + type: string; + label?: string; + startLine?: number; + endLine?: number; +} + +interface CfgEdge { + source: number; + target: number; + kind: string; +} + +interface CfgResultEntry { + kind: string; + name: string; + file: string; + line: number; + summary: { blockCount: number; edgeCount: number }; + blocks: CfgBlock[]; + edges: CfgEdge[]; +} + +export function cfg(name: string, customDbPath: string | undefined, opts: CfgCliOpts = {}): void { const data = cfgData(name, customDbPath, opts); if (outputResult(data, 'results', opts)) return; @@ -29,7 +61,7 @@ export function cfg(name, customDbPath, opts = {}) { } // Text format - for (const r of data.results) { + for (const r of data.results as CfgResultEntry[]) { console.log(`\n${r.kind} ${r.name} (${r.file}:${r.line})`); console.log('\u2500'.repeat(60)); console.log(` Blocks: ${r.summary.blockCount} Edges: ${r.summary.edgeCount}`); diff --git a/src/presentation/check.js b/src/presentation/check.ts similarity index 72% rename from src/presentation/check.js rename to src/presentation/check.ts index 1dc0c4fc..30515008 100644 --- a/src/presentation/check.js +++ b/src/presentation/check.ts @@ -2,11 +2,52 @@ import { checkData } from '../features/check.js'; 
import { outputResult } from '../infrastructure/result-formatter.js'; import { AnalysisError } from '../shared/errors.js'; -/** - * CLI formatter — prints check results and sets exitCode 1 on failure. - */ -export function check(customDbPath, opts = {}) { - const data = checkData(customDbPath, opts); +interface CheckCliOpts { + json?: boolean; + ndjson?: boolean; + noTests?: boolean; + base?: string; + maxCyclomatic?: number; + maxBlastRadius?: number; + boundaries?: boolean; + signatures?: boolean; +} + +interface CheckViolation { + name: string; + kind: string; + file: string; + line: number; + transitiveCallers?: number; + from?: string; + to?: string; + edgeKind?: string; +} + +interface CheckPredicate { + passed: boolean; + name: string; + note?: string; + cycles?: string[][]; + violations?: CheckViolation[]; + threshold?: number; +} + +interface CheckDataResult { + error?: string; + passed: boolean; + predicates: CheckPredicate[]; + summary: { + changedFiles: number; + newFiles: number; + total: number; + passed: number; + failed: number; + }; +} + +export function check(customDbPath: string | undefined, opts: CheckCliOpts = {}): void { + const data = checkData(customDbPath, opts) as CheckDataResult; if (data.error) { throw new AnalysisError(data.error); diff --git a/src/presentation/cochange.js b/src/presentation/cochange.ts similarity index 58% rename from src/presentation/cochange.js rename to src/presentation/cochange.ts index 21802cc2..4fc35985 100644 --- a/src/presentation/cochange.js +++ b/src/presentation/cochange.ts @@ -1,11 +1,21 @@ -/** - * Format co-change data for CLI output (single file). 
- */ -export function formatCoChange(data) { +interface CoChangePartner { + jaccard: number; + commitCount: number; + file: string; +} + +interface CoChangeData { + error?: string; + file: string; + partners: CoChangePartner[]; + meta?: { analyzedAt?: string; since?: string }; +} + +export function formatCoChange(data: CoChangeData): string { if (data.error) return data.error; if (data.partners.length === 0) return `No co-change partners found for ${data.file}`; - const lines = [`\nCo-change partners for ${data.file}:\n`]; + const lines: string[] = [`\nCo-change partners for ${data.file}:\n`]; for (const p of data.partners) { const pct = `${(p.jaccard * 100).toFixed(0)}%`.padStart(4); const commits = `${p.commitCount} commits`.padStart(12); @@ -17,14 +27,24 @@ export function formatCoChange(data) { return lines.join('\n'); } -/** - * Format top co-change pairs for CLI output (global view). - */ -export function formatCoChangeTop(data) { +interface CoChangePair { + jaccard: number; + commitCount: number; + fileA: string; + fileB: string; +} + +interface CoChangeTopData { + error?: string; + pairs: CoChangePair[]; + meta?: { analyzedAt?: string; since?: string }; +} + +export function formatCoChangeTop(data: CoChangeTopData): string { if (data.error) return data.error; if (data.pairs.length === 0) return 'No co-change pairs found.'; - const lines = ['\nTop co-change pairs:\n']; + const lines: string[] = ['\nTop co-change pairs:\n']; for (const p of data.pairs) { const pct = `${(p.jaccard * 100).toFixed(0)}%`.padStart(4); const commits = `${p.commitCount} commits`.padStart(12); diff --git a/src/presentation/communities.js b/src/presentation/communities.ts similarity index 69% rename from src/presentation/communities.js rename to src/presentation/communities.ts index ae1e8fb7..eb8ecf07 100644 --- a/src/presentation/communities.js +++ b/src/presentation/communities.ts @@ -1,11 +1,51 @@ import { communitiesData } from '../features/communities.js'; import { outputResult } 
from '../infrastructure/result-formatter.js'; -/** - * CLI entry point: run community detection and print results. - */ -export function communities(customDbPath, opts = {}) { - const data = communitiesData(customDbPath, opts); +interface CommunitiesCliOpts { + json?: boolean; + ndjson?: boolean; + noTests?: boolean; + functions?: boolean; + resolution?: number; + drift?: boolean; +} + +interface CommunityMember { + name: string; + kind?: string; + file: string; +} + +interface Community { + id: number; + size: number; + directories: Record; + members?: CommunityMember[]; +} + +interface DriftAnalysis { + splitCandidates: Array<{ directory: string; communityCount: number }>; + mergeCandidates: Array<{ + communityId: number; + size: number; + directoryCount: number; + directories: string[]; + }>; +} + +interface CommunitiesResult { + summary: { + communityCount: number; + nodeCount: number; + modularity: number; + driftScore: number; + }; + communities: Community[]; + drift: DriftAnalysis; +} + +export function communities(customDbPath: string | undefined, opts: CommunitiesCliOpts = {}): void { + const data = communitiesData(customDbPath, opts) as unknown as CommunitiesResult; if (outputResult(data, 'communities', opts)) return; diff --git a/src/presentation/complexity.js b/src/presentation/complexity.ts similarity index 76% rename from src/presentation/complexity.js rename to src/presentation/complexity.ts index f2de7f6a..4efd3d4e 100644 --- a/src/presentation/complexity.js +++ b/src/presentation/complexity.ts @@ -1,11 +1,54 @@ import { complexityData } from '../features/complexity.js'; import { outputResult } from '../infrastructure/result-formatter.js'; -/** - * Format complexity output for CLI display. 
- */ -export function complexity(customDbPath, opts = {}) { - const data = complexityData(customDbPath, opts); +interface ComplexityCliOpts { + json?: boolean; + ndjson?: boolean; + noTests?: boolean; + target?: string; + limit?: number; + sort?: string; + aboveThreshold?: boolean; + health?: boolean; + file?: string; + kind?: string; + offset?: number; +} + +interface ComplexityFunction { + name: string; + file: string; + cognitive: number; + cyclomatic: number; + maxNesting: number; + maintainabilityIndex: number; + loc: number; + sloc: number; + exceeds?: string[]; + halstead: { + volume: number; + difficulty: number; + effort: number; + bugs: number; + }; +} + +interface ComplexitySummary { + analyzed: number; + avgCognitive: number; + avgCyclomatic: number; + avgMI?: number; + aboveWarn: number; +} + +interface ComplexityResult { + functions: ComplexityFunction[]; + summary: ComplexitySummary | null; + hasGraph: boolean; +} + +export function complexity(customDbPath: string | undefined, opts: ComplexityCliOpts = {}): void { + const data = complexityData(customDbPath, opts) as unknown as ComplexityResult; if (outputResult(data, 'functions', opts)) return; diff --git a/src/presentation/dataflow.js b/src/presentation/dataflow.js deleted file mode 100644 index fafcfb21..00000000 --- a/src/presentation/dataflow.js +++ /dev/null @@ -1,110 +0,0 @@ -import { dataflowData, dataflowImpactData } from '../features/dataflow.js'; -import { outputResult } from '../infrastructure/result-formatter.js'; - -/** - * CLI display for dataflow command. 
- */ -export function dataflow(name, customDbPath, opts = {}) { - if (opts.impact) { - return dataflowImpact(name, customDbPath, opts); - } - - const data = dataflowData(name, customDbPath, opts); - - if (outputResult(data, 'results', opts)) return; - - if (data.warning) { - console.log(`⚠ ${data.warning}`); - return; - } - if (data.results.length === 0) { - console.log(`No symbols matching "${name}".`); - return; - } - - for (const r of data.results) { - console.log(`\n${r.kind} ${r.name} (${r.file}:${r.line})`); - console.log('─'.repeat(60)); - - if (r.flowsTo.length > 0) { - console.log('\n Data flows TO:'); - for (const f of r.flowsTo) { - const conf = f.confidence < 1.0 ? ` [${(f.confidence * 100).toFixed(0)}%]` : ''; - console.log(` → ${f.target} (${f.file}:${f.line}) arg[${f.paramIndex}]${conf}`); - } - } - - if (r.flowsFrom.length > 0) { - console.log('\n Data flows FROM:'); - for (const f of r.flowsFrom) { - const conf = f.confidence < 1.0 ? ` [${(f.confidence * 100).toFixed(0)}%]` : ''; - console.log(` ← ${f.source} (${f.file}:${f.line}) arg[${f.paramIndex}]${conf}`); - } - } - - if (r.returns.length > 0) { - console.log('\n Return value consumed by:'); - for (const c of r.returns) { - console.log(` → ${c.consumer} (${c.file}:${c.line}) ${c.expression}`); - } - } - - if (r.returnedBy.length > 0) { - console.log('\n Uses return value of:'); - for (const p of r.returnedBy) { - console.log(` ← ${p.producer} (${p.file}:${p.line}) ${p.expression}`); - } - } - - if (r.mutates.length > 0) { - console.log('\n Mutates:'); - for (const m of r.mutates) { - console.log(` ✎ ${m.expression} (line ${m.line})`); - } - } - - if (r.mutatedBy.length > 0) { - console.log('\n Mutated by:'); - for (const m of r.mutatedBy) { - console.log(` ✎ ${m.source} — ${m.expression} (line ${m.line})`); - } - } - } -} - -/** - * CLI display for dataflow --impact. 
- */ -function dataflowImpact(name, customDbPath, opts = {}) { - const data = dataflowImpactData(name, customDbPath, { - noTests: opts.noTests, - depth: opts.depth ? Number(opts.depth) : 5, - file: opts.file, - kind: opts.kind, - limit: opts.limit, - offset: opts.offset, - }); - - if (outputResult(data, 'results', opts)) return; - - if (data.warning) { - console.log(`⚠ ${data.warning}`); - return; - } - if (data.results.length === 0) { - console.log(`No symbols matching "${name}".`); - return; - } - - for (const r of data.results) { - console.log( - `\n${r.kind} ${r.name} (${r.file}:${r.line}) — ${r.totalAffected} data-dependent consumer${r.totalAffected !== 1 ? 's' : ''}`, - ); - for (const [level, items] of Object.entries(r.levels)) { - console.log(` Level ${level}:`); - for (const item of items) { - console.log(` ${item.name} (${item.file}:${item.line})`); - } - } - } -} diff --git a/src/presentation/dataflow.ts b/src/presentation/dataflow.ts new file mode 100644 index 00000000..3071c904 --- /dev/null +++ b/src/presentation/dataflow.ts @@ -0,0 +1,173 @@ +import { dataflowData, dataflowImpactData } from '../features/dataflow.js'; +import { outputResult } from '../infrastructure/result-formatter.js'; + +interface DataflowCliOpts { + json?: boolean; + ndjson?: boolean; + noTests?: boolean; + file?: string; + kind?: string; + impact?: boolean; + depth?: string | number; + limit?: number; + offset?: number; +} + +interface DataflowFlow { + target?: string; + source?: string; + file: string; + line: number; + paramIndex: number; + confidence: number; +} + +interface DataflowReturn { + consumer?: string; + producer?: string; + file: string; + line: number; + expression: string; +} + +interface DataflowMutation { + source?: string; + expression: string; + line: number; +} + +interface DataflowResultEntry { + kind: string; + name: string; + file: string; + line: number; + flowsTo: DataflowFlow[]; + flowsFrom: DataflowFlow[]; + returns: DataflowReturn[]; + returnedBy: 
DataflowReturn[]; + mutates: DataflowMutation[]; + mutatedBy: DataflowMutation[]; +} + +interface DataflowImpactEntry { + kind: string; + name: string; + file: string; + line: number; + totalAffected: number; + levels: Record>; +} + +export function dataflow( + name: string, + customDbPath: string | undefined, + opts: DataflowCliOpts = {}, +): void { + if (opts.impact) { + dataflowImpact(name, customDbPath, opts); + return; + } + + const data = dataflowData(name, customDbPath, opts) as { + warning?: string; + results: DataflowResultEntry[]; + }; + + if (outputResult(data, 'results', opts)) return; + + if (data.warning) { + console.log(`\u26A0 ${data.warning}`); + return; + } + if (data.results.length === 0) { + console.log(`No symbols matching "${name}".`); + return; + } + + for (const r of data.results) { + console.log(`\n${r.kind} ${r.name} (${r.file}:${r.line})`); + console.log('\u2500'.repeat(60)); + + if (r.flowsTo.length > 0) { + console.log('\n Data flows TO:'); + for (const f of r.flowsTo) { + const conf = f.confidence < 1.0 ? ` [${(f.confidence * 100).toFixed(0)}%]` : ''; + console.log(` \u2192 ${f.target} (${f.file}:${f.line}) arg[${f.paramIndex}]${conf}`); + } + } + + if (r.flowsFrom.length > 0) { + console.log('\n Data flows FROM:'); + for (const f of r.flowsFrom) { + const conf = f.confidence < 1.0 ? 
` [${(f.confidence * 100).toFixed(0)}%]` : ''; + console.log(` \u2190 ${f.source} (${f.file}:${f.line}) arg[${f.paramIndex}]${conf}`); + } + } + + if (r.returns.length > 0) { + console.log('\n Return value consumed by:'); + for (const c of r.returns) { + console.log(` \u2192 ${c.consumer} (${c.file}:${c.line}) ${c.expression}`); + } + } + + if (r.returnedBy.length > 0) { + console.log('\n Uses return value of:'); + for (const p of r.returnedBy) { + console.log(` \u2190 ${p.producer} (${p.file}:${p.line}) ${p.expression}`); + } + } + + if (r.mutates.length > 0) { + console.log('\n Mutates:'); + for (const m of r.mutates) { + console.log(` \u270E ${m.expression} (line ${m.line})`); + } + } + + if (r.mutatedBy.length > 0) { + console.log('\n Mutated by:'); + for (const m of r.mutatedBy) { + console.log(` \u270E ${m.source} \u2014 ${m.expression} (line ${m.line})`); + } + } + } +} + +function dataflowImpact( + name: string, + customDbPath: string | undefined, + opts: DataflowCliOpts = {}, +): void { + const data = dataflowImpactData(name, customDbPath, { + noTests: opts.noTests, + depth: opts.depth ? Number(opts.depth) : 5, + file: opts.file, + kind: opts.kind, + limit: opts.limit, + offset: opts.offset, + }) as { warning?: string; results: DataflowImpactEntry[] }; + + if (outputResult(data, 'results', opts)) return; + + if (data.warning) { + console.log(`\u26A0 ${data.warning}`); + return; + } + if (data.results.length === 0) { + console.log(`No symbols matching "${name}".`); + return; + } + + for (const r of data.results) { + console.log( + `\n${r.kind} ${r.name} (${r.file}:${r.line}) \u2014 ${r.totalAffected} data-dependent consumer${r.totalAffected !== 1 ? 
's' : ''}`, + ); + for (const [level, items] of Object.entries(r.levels)) { + console.log(` Level ${level}:`); + for (const item of items) { + console.log(` ${item.name} (${item.file}:${item.line})`); + } + } + } +} diff --git a/src/presentation/export.js b/src/presentation/export.ts similarity index 71% rename from src/presentation/export.js rename to src/presentation/export.ts index 6cb9b8ad..c2991a08 100644 --- a/src/presentation/export.js +++ b/src/presentation/export.ts @@ -1,16 +1,8 @@ -/** - * Graph export serializers — pure data → formatted string transforms. - * - * Each function receives pre-loaded graph data and returns a formatted string - * (or structured object for CSV). No DB access — all data must be pre-loaded. - */ - import path from 'node:path'; // ─── Escape Helpers ────────────────────────────────────────────────── -/** Escape special XML characters. */ -export function escapeXml(s) { +export function escapeXml(s: string | number): string { return String(s) .replace(/&/g, '&') .replace(/ = { entry: 'fill:#e8f5e9,stroke:#4caf50', core: 'fill:#e3f2fd,stroke:#2196f3', utility: 'fill:#f5f5f5,stroke:#9e9e9e', @@ -66,13 +54,18 @@ export const ROLE_STYLES = { // ─── DOT Serializer ────────────────────────────────────────────────── -/** - * Render file-level graph data as DOT (Graphviz) format. 
- * - * @param {{ dirs: Array<{ name: string, files: Array<{ path: string, basename: string }>, cohesion: number|null }>, edges: Array<{ source: string, target: string }>, totalEdges: number, limit?: number }} data - * @returns {string} - */ -export function renderFileLevelDOT(data) { +interface FileLevelDOTData { + dirs: Array<{ + name: string; + files: Array<{ path: string; basename: string }>; + cohesion: number | null; + }>; + edges: Array<{ source: string; target: string }>; + totalEdges: number; + limit?: number; +} + +export function renderFileLevelDOT(data: FileLevelDOTData): string { const lines = [ 'digraph codegraph {', ' rankdir=LR;', @@ -106,13 +99,18 @@ export function renderFileLevelDOT(data) { return lines.join('\n'); } -/** - * Render function-level graph data as DOT (Graphviz) format. - * - * @param {{ edges: Array<{ source_name: string, source_file: string, target_name: string, target_file: string }>, totalEdges: number, limit?: number }} data - * @returns {string} - */ -export function renderFunctionLevelDOT(data) { +interface FunctionLevelDOTData { + edges: Array<{ + source_name: string; + source_file: string; + target_name: string; + target_file: string; + }>; + totalEdges: number; + limit?: number; +} + +export function renderFunctionLevelDOT(data: FunctionLevelDOTData): string { const lines = [ 'digraph codegraph {', ' rankdir=LR;', @@ -121,7 +119,7 @@ export function renderFunctionLevelDOT(data) { '', ]; - const emittedNodes = new Set(); + const emittedNodes = new Set(); for (const e of data.edges) { const sId = `${e.source_file}:${e.source_name}`.replace(/[^a-zA-Z0-9_]/g, '_'); const tId = `${e.target_file}:${e.target_name}`.replace(/[^a-zA-Z0-9_]/g, '_'); @@ -145,20 +143,22 @@ export function renderFunctionLevelDOT(data) { // ─── Mermaid Serializer ────────────────────────────────────────────── -/** - * Render file-level graph data as Mermaid flowchart format. 
- * - * @param {{ direction: string, dirs: Array<{ name: string, files: string[] }>, edges: Array<{ source: string, target: string, edge_kind: string }>, totalEdges: number, limit?: number }} data - * @returns {string} - */ -export function renderFileLevelMermaid(data) { +interface FileLevelMermaidData { + direction: string; + dirs: Array<{ name: string; files: string[] }>; + edges: Array<{ source: string; target: string; edge_kind: string }>; + totalEdges: number; + limit?: number; +} + +export function renderFileLevelMermaid(data: FileLevelMermaidData): string { const lines = [`flowchart ${data.direction || 'LR'}`]; let nodeCounter = 0; - const nodeIdMap = new Map(); - function nodeId(key) { + const nodeIdMap = new Map(); + function nodeId(key: string): string { if (!nodeIdMap.has(key)) nodeIdMap.set(key, `n${nodeCounter++}`); - return nodeIdMap.get(key); + return nodeIdMap.get(key)!; } // Emit subgraphs @@ -173,12 +173,12 @@ export function renderFileLevelMermaid(data) { } // Deduplicate edges per source-target pair, collecting all distinct kinds - const edgeMap = new Map(); + const edgeMap = new Map }>(); for (const { source, target, edge_kind } of data.edges) { const key = `${source}|${target}`; const label = edge_kind === 'imports-type' ? 'imports' : edge_kind; if (!edgeMap.has(key)) edgeMap.set(key, { source, target, labels: new Set() }); - edgeMap.get(key).labels.add(label); + edgeMap.get(key)!.labels.add(label); } for (const { source, target, labels } of edgeMap.values()) { @@ -191,25 +191,37 @@ export function renderFileLevelMermaid(data) { return lines.join('\n'); } -/** - * Render function-level graph data as Mermaid flowchart format. 
- * - * @param {{ direction: string, edges: Array, roles: Map, totalEdges: number, limit?: number }} data - * @returns {string} - */ -export function renderFunctionLevelMermaid(data) { +interface FunctionLevelMermaidEdge { + source_file: string; + source_name: string; + source_kind: string; + target_file: string; + target_name: string; + target_kind: string; + edge_kind: string; +} + +interface FunctionLevelMermaidData { + direction: string; + edges: FunctionLevelMermaidEdge[]; + roles?: Map; + totalEdges: number; + limit?: number; +} + +export function renderFunctionLevelMermaid(data: FunctionLevelMermaidData): string { const lines = [`flowchart ${data.direction || 'LR'}`]; let nodeCounter = 0; - const nodeIdMap = new Map(); - function nodeId(key) { + const nodeIdMap = new Map(); + function nodeId(key: string): string { if (!nodeIdMap.has(key)) nodeIdMap.set(key, `n${nodeCounter++}`); - return nodeIdMap.get(key); + return nodeIdMap.get(key)!; } // Group nodes by file for subgraphs - const fileNodes = new Map(); - const nodeKinds = new Map(); + const fileNodes = new Map>(); + const nodeKinds = new Map(); for (const e of data.edges) { const sKey = `${e.source_file}::${e.source_name}`; const tKey = `${e.target_file}::${e.target_name}`; @@ -219,10 +231,10 @@ export function renderFunctionLevelMermaid(data) { nodeKinds.set(tKey, e.target_kind); if (!fileNodes.has(e.source_file)) fileNodes.set(e.source_file, new Map()); - fileNodes.get(e.source_file).set(sKey, e.source_name); + fileNodes.get(e.source_file)!.set(sKey, e.source_name); if (!fileNodes.has(e.target_file)) fileNodes.set(e.target_file, new Map()); - fileNodes.get(e.target_file).set(tKey, e.target_name); + fileNodes.get(e.target_file)!.set(tKey, e.target_name); } // Emit subgraphs grouped by file @@ -231,7 +243,7 @@ export function renderFunctionLevelMermaid(data) { lines.push(` subgraph ${sgId}["${escapeLabel(file)}"]`); for (const [key, name] of nodes) { const kind = nodeKinds.get(key); - lines.push(` 
${nodeId(key)}${mermaidShape(kind, name)}`); + lines.push(` ${nodeId(key)}${mermaidShape(kind!, name)}`); } lines.push(' end'); } @@ -247,7 +259,7 @@ export function renderFunctionLevelMermaid(data) { } // Role styling - const roleStyles = []; + const roleStyles: string[] = []; for (const [key, nid] of nodeIdMap) { const role = data.roles?.get(key); if (role && ROLE_STYLES[role]) { @@ -261,13 +273,11 @@ export function renderFunctionLevelMermaid(data) { // ─── GraphML Serializer ────────────────────────────────────────────── -/** - * Render file-level graph data as GraphML (XML) format. - * - * @param {{ edges: Array<{ source: string, target: string }> }} data - * @returns {string} - */ -export function renderFileLevelGraphML(data) { +interface FileLevelGraphMLData { + edges: Array<{ source: string; target: string }>; +} + +export function renderFileLevelGraphML(data: FileLevelGraphMLData): string { const lines = [ '', '', @@ -277,13 +287,13 @@ export function renderFileLevelGraphML(data) { ' ', ]; - const files = new Set(); + const files = new Set(); for (const { source, target } of data.edges) { files.add(source); files.add(target); } - const fileIds = new Map(); + const fileIds = new Map(); let nIdx = 0; for (const f of files) { const id = `n${nIdx++}`; @@ -308,13 +318,28 @@ export function renderFileLevelGraphML(data) { return lines.join('\n'); } -/** - * Render function-level graph data as GraphML (XML) format. 
- * - * @param {{ edges: Array }} data - * @returns {string} - */ -export function renderFunctionLevelGraphML(data) { +interface FunctionLevelGraphMLEdge { + source_id: number; + source_name: string; + source_kind: string; + source_file: string; + source_line: number; + source_role: string | null; + target_id: number; + target_name: string; + target_kind: string; + target_file: string; + target_line: number; + target_role: string | null; + edge_kind: string; + confidence: number; +} + +interface FunctionLevelGraphMLData { + edges: FunctionLevelGraphMLEdge[]; +} + +export function renderFunctionLevelGraphML(data: FunctionLevelGraphMLData): string { const lines = [ '', '', @@ -328,8 +353,15 @@ export function renderFunctionLevelGraphML(data) { ' ', ]; - const emittedNodes = new Set(); - function emitNode(id, name, kind, file, line, role) { + const emittedNodes = new Set(); + function emitNode( + id: number, + name: string, + kind: string, + file: string, + line: number, + role: string | null, + ): void { if (emittedNodes.has(id)) return; emittedNodes.add(id); lines.push(` `); @@ -372,14 +404,22 @@ export function renderFunctionLevelGraphML(data) { // ─── Neo4j CSV Serializer ──────────────────────────────────────────── -/** - * Render file-level graph data as Neo4j bulk-import CSV. 
- * - * @param {{ edges: Array<{ source: string, target: string, edge_kind: string, confidence: number }> }} data - * @returns {{ nodes: string, relationships: string }} - */ -export function renderFileLevelNeo4jCSV(data) { - const files = new Map(); +interface FileLevelNeo4jEdge { + source: string; + target: string; + edge_kind: string; + confidence: number; +} + +interface FileLevelNeo4jData { + edges: FileLevelNeo4jEdge[]; +} + +export function renderFileLevelNeo4jCSV(data: FileLevelNeo4jData): { + nodes: string; + relationships: string; +} { + const files = new Map(); let idx = 0; for (const { source, target } of data.edges) { if (!files.has(source)) files.set(source, idx++); @@ -400,16 +440,41 @@ export function renderFileLevelNeo4jCSV(data) { return { nodes: nodeLines.join('\n'), relationships: relLines.join('\n') }; } -/** - * Render function-level graph data as Neo4j bulk-import CSV. - * - * @param {{ edges: Array }} data - * @returns {{ nodes: string, relationships: string }} - */ -export function renderFunctionLevelNeo4jCSV(data) { - const emitted = new Set(); +interface FunctionLevelNeo4jEdge { + source_id: number; + source_name: string; + source_kind: string; + source_file: string; + source_line: number; + source_role: string | null; + target_id: number; + target_name: string; + target_kind: string; + target_file: string; + target_line: number; + target_role: string | null; + edge_kind: string; + confidence: number; +} + +interface FunctionLevelNeo4jData { + edges: FunctionLevelNeo4jEdge[]; +} + +export function renderFunctionLevelNeo4jCSV(data: FunctionLevelNeo4jData): { + nodes: string; + relationships: string; +} { + const emitted = new Set(); const nodeLines = ['nodeId:ID,name,kind,file:string,line:int,role,:LABEL']; - function emitNode(id, name, kind, file, line, role) { + function emitNode( + id: number, + name: string, + kind: string, + file: string, + line: number, + role: string | null, + ): void { if (emitted.has(id)) return; emitted.add(id); 
const label = kind.charAt(0).toUpperCase() + kind.slice(1); diff --git a/src/presentation/flow.js b/src/presentation/flow.ts similarity index 68% rename from src/presentation/flow.js rename to src/presentation/flow.ts index e86f2de9..1225a042 100644 --- a/src/presentation/flow.js +++ b/src/presentation/flow.ts @@ -2,23 +2,40 @@ import { kindIcon } from '../domain/queries.js'; import { flowData, listEntryPointsData } from '../features/flow.js'; import { outputResult } from '../infrastructure/result-formatter.js'; -/** - * CLI formatter — text or JSON output. - */ -export function flow(name, dbPath, opts = {}) { +interface FlowOpts { + list?: boolean; + noTests?: boolean; + limit?: number; + offset?: number; + depth?: number; + file?: string; + kind?: string; + json?: boolean; + ndjson?: boolean; + table?: boolean; + csv?: boolean; +} + +export function flow(name: string, dbPath: string | undefined, opts: FlowOpts = {}): void { if (opts.list) { const data = listEntryPointsData(dbPath, { noTests: opts.noTests, limit: opts.limit, offset: opts.offset, - }); + // biome-ignore lint/suspicious/noExplicitAny: dynamic shape from listEntryPointsData + }) as any; if (outputResult(data, 'entries', opts)) return; if (data.count === 0) { console.log('No entry points found. 
Run "codegraph build" first.'); return; } console.log(`\nEntry points (${data.count} total):\n`); - for (const [type, entries] of Object.entries(data.byType)) { + for (const [type, entries] of Object.entries( + data.byType as Record< + string, + Array<{ kind: string; name: string; file: string; line: number }> + >, + )) { console.log(` ${type} (${entries.length}):`); for (const e of entries) { console.log(` [${kindIcon(e.kind)}] ${e.name} ${e.file}:${e.line}`); @@ -28,7 +45,8 @@ export function flow(name, dbPath, opts = {}) { return; } - const data = flowData(name, dbPath, opts); + // biome-ignore lint/suspicious/noExplicitAny: dynamic shape from flowData + const data = flowData(name, dbPath, opts) as any; if (outputResult(data, 'steps', opts)) return; if (!data.entry) { @@ -55,7 +73,9 @@ export function flow(name, dbPath, opts = {}) { for (const step of data.steps) { console.log(` depth ${step.depth}:`); for (const n of step.nodes) { - const isLeaf = data.leaves.some((l) => l.name === n.name && l.file === n.file); + const isLeaf = data.leaves.some( + (l: { name: string; file: string }) => l.name === n.name && l.file === n.file, + ); const leafTag = isLeaf ? ' [leaf]' : ''; console.log(` [${kindIcon(n.kind)}] ${n.name} ${n.file}:${n.line}${leafTag}`); } diff --git a/src/presentation/manifesto.js b/src/presentation/manifesto.ts similarity index 75% rename from src/presentation/manifesto.js rename to src/presentation/manifesto.ts index f491e0bf..ca338e28 100644 --- a/src/presentation/manifesto.js +++ b/src/presentation/manifesto.ts @@ -1,11 +1,30 @@ import { manifestoData } from '../features/manifesto.js'; import { outputResult } from '../infrastructure/result-formatter.js'; -/** - * CLI formatter — prints manifesto results and sets exitCode 1 on failure. 
- */ -export function manifesto(customDbPath, opts = {}) { - const data = manifestoData(customDbPath, opts); +interface ManifestoOpts { + json?: boolean; + ndjson?: boolean; + table?: boolean; + csv?: boolean; + config?: unknown; + noTests?: boolean; + limit?: number; + offset?: number; +} + +interface ManifestoViolationRow { + rule: string; + level: string; + value: number; + threshold: number; + name?: string; + file?: string; + line?: number; +} + +export function manifesto(customDbPath: string | undefined, opts: ManifestoOpts = {}): void { + // biome-ignore lint/suspicious/noExplicitAny: dynamic shape from manifestoData + const data = manifestoData(customDbPath, opts as any) as any; if (outputResult(data, 'violations', opts)) { if (!data.passed) process.exitCode = 1; @@ -39,8 +58,8 @@ export function manifesto(customDbPath, opts = {}) { // Violations detail if (data.violations.length > 0) { - const failViolations = data.violations.filter((v) => v.level === 'fail'); - const warnViolations = data.violations.filter((v) => v.level === 'warn'); + const failViolations = data.violations.filter((v: ManifestoViolationRow) => v.level === 'fail'); + const warnViolations = data.violations.filter((v: ManifestoViolationRow) => v.level === 'warn'); if (failViolations.length > 0) { console.log(`\n## Failures (${failViolations.length})\n`); diff --git a/src/presentation/owners.js b/src/presentation/owners.ts similarity index 59% rename from src/presentation/owners.js rename to src/presentation/owners.ts index 2ec0f5c3..e8854a2a 100644 --- a/src/presentation/owners.js +++ b/src/presentation/owners.ts @@ -1,12 +1,46 @@ import { ownersData } from '../features/owners.js'; import { outputResult } from '../infrastructure/result-formatter.js'; -/** - * CLI display function for the `owners` command. 
- */ -export function owners(customDbPath, opts = {}) { - const data = ownersData(customDbPath, opts); - if (outputResult(data, null, opts)) return; +interface OwnersOpts { + json?: boolean; + ndjson?: boolean; + table?: boolean; + csv?: boolean; + owner?: string; + noTests?: boolean; + limit?: number; + offset?: number; +} + +interface OwnerEntry { + name: string; + kind: string; + file: string; + line: number; + owners: string[]; +} + +interface OwnersResult { + codeownersFile: string | null; + files: Array<{ file: string; owners: string[] }>; + symbols: OwnerEntry[]; + boundaries: Array<{ + from: OwnerEntry; + to: OwnerEntry; + edgeKind: string; + }>; + summary: { + coveragePercent: number; + ownedFiles: number; + totalFiles: number; + ownerCount: number; + byOwner: Array<{ owner: string; fileCount: number }>; + }; +} + +export function owners(customDbPath: string | undefined, opts: OwnersOpts = {}): void { + const data = ownersData(customDbPath, opts) as OwnersResult; + if (outputResult(data as unknown as Record, null, opts)) return; if (!data.codeownersFile) { console.log('No CODEOWNERS file found.'); diff --git a/src/presentation/queries-cli.js b/src/presentation/queries-cli.ts similarity index 100% rename from src/presentation/queries-cli.js rename to src/presentation/queries-cli.ts diff --git a/src/presentation/queries-cli/exports.js b/src/presentation/queries-cli/exports.ts similarity index 70% rename from src/presentation/queries-cli/exports.js rename to src/presentation/queries-cli/exports.ts index 445255a6..d8e779a4 100644 --- a/src/presentation/queries-cli/exports.js +++ b/src/presentation/queries-cli/exports.ts @@ -1,7 +1,48 @@ import { exportsData, kindIcon } from '../../domain/queries.js'; import { outputResult } from '../../infrastructure/result-formatter.js'; -function printExportHeader(data, opts) { +interface ExportConsumer { + name: string; + file: string; + line: number; +} + +interface ExportSymbol { + kind: string; + name: string; + line: 
number; + role?: string; + signature?: { params?: string }; + consumers: ExportConsumer[]; +} + +interface ReexportedSymbol extends ExportSymbol { + originFile: string; +} + +interface ExportsDataResult { + file: string; + totalExported: number; + totalInternal: number; + totalUnused: number; + totalReexported?: number; + totalReexportedUnused?: number; + results: ExportSymbol[]; + reexportedSymbols: ReexportedSymbol[]; + reexports: { file: string }[]; +} + +interface ExportsOpts { + json?: boolean; + ndjson?: boolean; + csv?: boolean; + table?: boolean; + noTests?: boolean; + unused?: boolean; + [key: string]: unknown; +} + +function printExportHeader(data: ExportsDataResult, opts: ExportsOpts): void { if (opts.unused) { console.log( `\n# ${data.file} — ${data.totalUnused} unused export${data.totalUnused !== 1 ? 's' : ''} (of ${data.totalExported} exported)\n`, @@ -14,7 +55,7 @@ function printExportHeader(data, opts) { } } -function printExportSymbols(results) { +function printExportSymbols(results: ExportSymbol[]): void { for (const sym of results) { const icon = kindIcon(sym.kind); const sig = sym.signature?.params ? 
`(${sym.signature.params})` : ''; @@ -30,12 +71,12 @@ function printExportSymbols(results) { } } -function printReexportedSymbols(reexportedSymbols) { +function printReexportedSymbols(reexportedSymbols: ReexportedSymbol[]): void { // Group by origin file - const byOrigin = new Map(); + const byOrigin = new Map(); for (const sym of reexportedSymbols) { if (!byOrigin.has(sym.originFile)) byOrigin.set(sym.originFile, []); - byOrigin.get(sym.originFile).push(sym); + byOrigin.get(sym.originFile)!.push(sym); } for (const [originFile, syms] of byOrigin) { @@ -56,9 +97,9 @@ function printReexportedSymbols(reexportedSymbols) { } } -export function fileExports(file, customDbPath, opts = {}) { - const data = exportsData(file, customDbPath, opts); - if (outputResult(data, 'results', opts)) return; +export function fileExports(file: string, customDbPath: string, opts: ExportsOpts = {}): void { + const data = exportsData(file, customDbPath, opts) as ExportsDataResult; + if (outputResult(data as unknown as Record, 'results', opts)) return; const hasReexported = data.reexportedSymbols && data.reexportedSymbols.length > 0; diff --git a/src/presentation/queries-cli/impact.js b/src/presentation/queries-cli/impact.ts similarity index 62% rename from src/presentation/queries-cli/impact.js rename to src/presentation/queries-cli/impact.ts index 511cb42e..37852f4a 100644 --- a/src/presentation/queries-cli/impact.js +++ b/src/presentation/queries-cli/impact.ts @@ -9,9 +9,123 @@ import { } from '../../domain/queries.js'; import { outputResult } from '../../infrastructure/result-formatter.js'; -export function fileDeps(file, customDbPath, opts = {}) { - const data = fileDepsData(file, customDbPath, opts); - if (outputResult(data, 'results', opts)) return; +interface SymbolRef { + kind: string; + name: string; + file: string; + line: number; +} + +interface FileDepsImport { + file: string; + typeOnly?: boolean; +} + +interface FileDepsResult { + file: string; + imports: FileDepsImport[]; + 
importedBy: { file: string }[]; + definitions: SymbolRef[]; +} + +interface FileDepsData { + results: FileDepsResult[]; +} + +interface CallerRef extends SymbolRef { + viaHierarchy?: string; +} + +interface FnDepsResult extends SymbolRef { + callees: SymbolRef[]; + callers: CallerRef[]; + transitiveCallers: Record; +} + +interface FnDepsData { + results: FnDepsResult[]; +} + +interface ImpactNode { + file: string; +} + +interface ImpactData { + sources: string[]; + levels: Record; + totalDependents: number; +} + +interface FnImpactResult extends SymbolRef { + levels: Record; + totalDependents: number; +} + +interface FnImpactData { + results: FnImpactResult[]; +} + +interface AffectedFunction extends SymbolRef { + transitiveCallers: number; +} + +interface CoupledFile { + file: string; + coupledWith: string; + jaccard: number; + commitCount: number; +} + +interface BoundaryViolation { + name: string; + file: string; + targetFile: string; + message?: string; +} + +interface DiffSummary { + functionsChanged: number; + callersAffected: number; + filesAffected: number; + historicallyCoupledCount: number; + ownersAffected: number; + boundaryViolationCount: number; +} + +interface DiffImpactData { + error?: string; + changedFiles: number; + affectedFunctions: AffectedFunction[]; + historicallyCoupled?: CoupledFile[]; + ownership?: { + affectedOwners: string[]; + suggestedReviewers: string[]; + }; + boundaryViolations?: BoundaryViolation[]; + boundaryViolationCount?: number; + summary?: DiffSummary; +} + +interface OutputOpts { + json?: boolean; + ndjson?: boolean; + csv?: boolean; + table?: boolean; + noTests?: boolean; + limit?: number; + offset?: number; + depth?: number; + format?: string; + staged?: boolean; + ref?: string; + file?: string; + kind?: string; + [key: string]: unknown; +} + +export function fileDeps(file: string, customDbPath: string, opts: OutputOpts = {}): void { + const data = fileDepsData(file, customDbPath, opts) as unknown as FileDepsData; + if 
(outputResult(data as unknown as Record, 'results', opts)) return; if (data.results.length === 0) { console.log(`No file matching "${file}" in graph`); @@ -37,9 +151,9 @@ export function fileDeps(file, customDbPath, opts = {}) { } } -export function fnDeps(name, customDbPath, opts = {}) { - const data = fnDepsData(name, customDbPath, opts); - if (outputResult(data, 'results', opts)) return; +export function fnDeps(name: string, customDbPath: string, opts: OutputOpts = {}): void { + const data = fnDepsData(name, customDbPath, opts) as unknown as FnDepsData; + if (outputResult(data as unknown as Record, 'results', opts)) return; if (data.results.length === 0) { console.log(`No function/method/class matching "${name}"`); @@ -77,9 +191,9 @@ export function fnDeps(name, customDbPath, opts = {}) { } } -export function impactAnalysis(file, customDbPath, opts = {}) { - const data = impactAnalysisData(file, customDbPath, opts); - if (outputResult(data, 'sources', opts)) return; +export function impactAnalysis(file: string, customDbPath: string, opts: OutputOpts = {}): void { + const data = impactAnalysisData(file, customDbPath, opts) as unknown as ImpactData; + if (outputResult(data as unknown as Record, 'sources', opts)) return; if (data.sources.length === 0) { console.log(`No file matching "${file}" in graph`); @@ -93,8 +207,8 @@ export function impactAnalysis(file, customDbPath, opts = {}) { if (Object.keys(levels).length === 0) { console.log(` No dependents found.`); } else { - for (const level of Object.keys(levels).sort((a, b) => a - b)) { - const nodes = levels[level]; + for (const level of Object.keys(levels).sort((a, b) => Number(a) - Number(b))) { + const nodes = levels[level]!; console.log( `\n ${'--'.repeat(parseInt(level, 10))} Level ${level} (${nodes.length} files):`, ); @@ -106,9 +220,9 @@ export function impactAnalysis(file, customDbPath, opts = {}) { console.log(`\n Total: ${data.totalDependents} files transitively depend on "${file}"\n`); } -export 
function fnImpact(name, customDbPath, opts = {}) { - const data = fnImpactData(name, customDbPath, opts); - if (outputResult(data, 'results', opts)) return; +export function fnImpact(name: string, customDbPath: string, opts: OutputOpts = {}): void { + const data = fnImpactData(name, customDbPath, opts) as unknown as FnImpactData; + if (outputResult(data as unknown as Record, 'results', opts)) return; if (data.results.length === 0) { console.log(`No function/method/class matching "${name}"`); @@ -120,7 +234,9 @@ export function fnImpact(name, customDbPath, opts = {}) { if (Object.keys(r.levels).length === 0) { console.log(` No callers found.`); } else { - for (const [level, fns] of Object.entries(r.levels).sort((a, b) => a[0] - b[0])) { + for (const [level, fns] of Object.entries(r.levels).sort( + (a, b) => Number(a[0]) - Number(b[0]), + )) { const l = parseInt(level, 10); console.log(` ${'--'.repeat(l)} Level ${level} (${fns.length} functions):`); for (const f of fns.slice(0, 20)) @@ -132,7 +248,7 @@ export function fnImpact(name, customDbPath, opts = {}) { } } -function printDiffFunctions(data) { +function printDiffFunctions(data: DiffImpactData): void { console.log(`\ndiff-impact: ${data.changedFiles} files changed\n`); console.log(` ${data.affectedFunctions.length} functions changed:\n`); for (const fn of data.affectedFunctions) { @@ -141,7 +257,7 @@ function printDiffFunctions(data) { } } -function printDiffCoupled(data) { +function printDiffCoupled(data: DiffImpactData): void { if (!data.historicallyCoupled?.length) return; console.log('\n Historically coupled (not in static graph):\n'); for (const c of data.historicallyCoupled) { @@ -152,13 +268,13 @@ function printDiffCoupled(data) { } } -function printDiffOwnership(data) { +function printDiffOwnership(data: DiffImpactData): void { if (!data.ownership) return; console.log(`\n Affected owners: ${data.ownership.affectedOwners.join(', ')}`); console.log(` Suggested reviewers: 
${data.ownership.suggestedReviewers.join(', ')}`); } -function printDiffBoundaries(data) { +function printDiffBoundaries(data: DiffImpactData): void { if (!data.boundaryViolations?.length) return; console.log(`\n Boundary violations (${data.boundaryViolationCount}):\n`); for (const v of data.boundaryViolations) { @@ -167,7 +283,7 @@ function printDiffBoundaries(data) { } } -function printDiffSummary(summary) { +function printDiffSummary(summary: DiffSummary | undefined): void { if (!summary) return; let line = `\n Summary: ${summary.functionsChanged} functions changed -> ${summary.callersAffected} callers affected across ${summary.filesAffected} files`; if (summary.historicallyCoupledCount > 0) { @@ -182,14 +298,14 @@ function printDiffSummary(summary) { console.log(`${line}\n`); } -export function diffImpact(customDbPath, opts = {}) { +export function diffImpact(customDbPath: string, opts: OutputOpts = {}): void { if (opts.format === 'mermaid') { console.log(diffImpactMermaid(customDbPath, opts)); return; } - const data = diffImpactData(customDbPath, opts); + const data = diffImpactData(customDbPath, opts) as unknown as DiffImpactData; if (opts.format === 'json') opts = { ...opts, json: true }; - if (outputResult(data, 'affectedFunctions', opts)) return; + if (outputResult(data as unknown as Record, 'affectedFunctions', opts)) return; if (data.error) { console.log(data.error); diff --git a/src/presentation/queries-cli/index.js b/src/presentation/queries-cli/index.ts similarity index 100% rename from src/presentation/queries-cli/index.js rename to src/presentation/queries-cli/index.ts diff --git a/src/presentation/queries-cli/inspect.js b/src/presentation/queries-cli/inspect.ts similarity index 66% rename from src/presentation/queries-cli/inspect.js rename to src/presentation/queries-cli/inspect.ts index f57c0dd6..d95656a6 100644 --- a/src/presentation/queries-cli/inspect.js +++ b/src/presentation/queries-cli/inspect.ts @@ -10,9 +10,182 @@ import { } from 
'../../domain/queries.js'; import { outputResult } from '../../infrastructure/result-formatter.js'; -export function where(target, customDbPath, opts = {}) { - const data = whereData(target, customDbPath, opts); - if (outputResult(data, 'results', opts)) return; +interface SymbolRef { + kind: string; + name: string; + file: string; + line: number; +} + +interface OutputOpts { + json?: boolean; + ndjson?: boolean; + csv?: boolean; + table?: boolean; + noTests?: boolean; + limit?: number; + offset?: number; + depth?: number; + noSource?: boolean; + file?: string | boolean; + kind?: string; + [key: string]: unknown; +} + +interface WhereSymbolResult { + kind: string; + name: string; + file: string; + line: number; + role?: string; + exported?: boolean; + uses: { file: string; line: number }[]; +} + +interface WhereFileResult { + file: string; + symbols: { name: string; line: number }[]; + imports: string[]; + importedBy: string[]; + exported: string[]; +} + +interface WhereData { + mode: 'symbol' | 'file'; + results: (WhereSymbolResult | WhereFileResult)[]; +} + +interface QueryNameCallee { + name: string; + edgeKind: string; + file: string; + line: number; +} + +interface QueryNameResult extends SymbolRef { + callees: QueryNameCallee[]; + callers: QueryNameCallee[]; +} + +interface QueryNameData { + results: QueryNameResult[]; +} + +interface ChildRef { + kind: string; + name: string; + line: number; +} + +interface CallerRef extends SymbolRef { + viaHierarchy?: string; +} + +interface CalleeRef extends SymbolRef { + summary?: string; + source?: string; +} + +interface ContextResult extends SymbolRef { + endLine?: number; + role?: string; + signature?: { params?: string; returnType?: string }; + children: ChildRef[]; + complexity?: { + cognitive: number; + cyclomatic: number; + maxNesting: number; + maintainabilityIndex?: number; + }; + source?: string; + callees: CalleeRef[]; + callers: CallerRef[]; + implementors?: SymbolRef[]; + implements?: SymbolRef[]; + 
relatedTests: { + file: string; + testCount: number; + testNames: string[]; + source?: string; + }[]; +} + +interface ContextData { + results: ContextResult[]; +} + +interface ChildrenResult extends SymbolRef { + children: ChildRef[]; +} + +interface ChildrenData { + results: ChildrenResult[]; +} + +interface ExplainSymbol { + kind: string; + name: string; + line: number; + role?: string; + signature?: { params?: string; returnType?: string }; + summary?: string; +} + +interface FileExplainResult { + file: string; + lineCount?: number; + symbolCount: number; + publicApi: ExplainSymbol[]; + internal: ExplainSymbol[]; + imports: { file: string }[]; + importedBy: { file: string }[]; + dataFlow: { caller: string; callees: string[] }[]; +} + +interface FunctionExplainResult extends SymbolRef { + endLine?: number; + lineCount?: number; + summary?: string; + role?: string; + _depth?: number; + signature?: { params?: string; returnType?: string }; + complexity?: { + cognitive: number; + cyclomatic: number; + maxNesting: number; + maintainabilityIndex?: number; + }; + callees: SymbolRef[]; + callers: SymbolRef[]; + relatedTests: { file: string }[]; + depDetails?: FunctionExplainResult[]; +} + +interface ExplainData { + kind: 'file' | 'function'; + // biome-ignore lint/suspicious/noExplicitAny: union of file/function results + results: any[]; +} + +interface ImplementationsResult extends SymbolRef { + implementors: SymbolRef[]; +} + +interface ImplementationsData { + results: ImplementationsResult[]; +} + +interface InterfacesResult extends SymbolRef { + interfaces: SymbolRef[]; +} + +interface InterfacesData { + results: InterfacesResult[]; +} + +export function where(target: string, customDbPath: string, opts: OutputOpts = {}): void { + const data = whereData(target, customDbPath, opts as Record) as WhereData; + if (outputResult(data as unknown as Record, 'results', opts)) return; if (data.results.length === 0) { console.log( @@ -24,7 +197,7 @@ export function 
where(target, customDbPath, opts = {}) { } if (data.mode === 'symbol') { - for (const r of data.results) { + for (const r of data.results as WhereSymbolResult[]) { const roleTag = r.role ? ` [${r.role}]` : ''; const tag = r.exported ? ' (exported)' : ''; console.log(`\n${kindIcon(r.kind)} ${r.name}${roleTag} ${r.file}:${r.line}${tag}`); @@ -36,7 +209,7 @@ export function where(target, customDbPath, opts = {}) { } } } else { - for (const r of data.results) { + for (const r of data.results as WhereFileResult[]) { console.log(`\n# ${r.file}`); if (r.symbols.length > 0) { const symStrs = r.symbols.map((s) => `${s.name}:${s.line}`); @@ -56,13 +229,13 @@ export function where(target, customDbPath, opts = {}) { console.log(); } -export function queryName(name, customDbPath, opts = {}) { +export function queryName(name: string, customDbPath: string, opts: OutputOpts = {}): void { const data = queryNameData(name, customDbPath, { noTests: opts.noTests, limit: opts.limit, offset: opts.offset, - }); - if (outputResult(data, 'results', opts)) return; + }) as QueryNameData; + if (outputResult(data as unknown as Record, 'results', opts)) return; if (data.results.length === 0) { console.log(`No results for "${name}"`); @@ -88,9 +261,9 @@ export function queryName(name, customDbPath, opts = {}) { } } -export function context(name, customDbPath, opts = {}) { - const data = contextData(name, customDbPath, opts); - if (outputResult(data, 'results', opts)) return; +export function context(name: string, customDbPath: string, opts: OutputOpts = {}): void { + const data = contextData(name, customDbPath, opts as Record) as ContextData; + if (outputResult(data as unknown as Record, 'results', opts)) return; if (data.results.length === 0) { console.log(`No function/method/class matching "${name}"`); @@ -102,9 +275,9 @@ export function context(name, customDbPath, opts = {}) { } } -export function children(name, customDbPath, opts = {}) { - const data = childrenData(name, customDbPath, opts); 
- if (outputResult(data, 'results', opts)) return; +export function children(name: string, customDbPath: string, opts: OutputOpts = {}): void { + const data = childrenData(name, customDbPath, opts as Record) as ChildrenData; + if (outputResult(data as unknown as Record, 'results', opts)) return; if (data.results.length === 0) { console.log(`No symbol matching "${name}"`); @@ -122,7 +295,7 @@ export function children(name, customDbPath, opts = {}) { } } -function renderContextResult(r) { +function renderContextResult(r: ContextResult): void { const lineRange = r.endLine ? `${r.line}-${r.endLine}` : `${r.line}`; const roleTag = r.role ? ` [${r.role}]` : ''; console.log(`\n# ${r.name} (${r.kind})${roleTag} — ${r.file}:${lineRange}\n`); @@ -222,7 +395,7 @@ function renderContextResult(r) { } } -function renderFileExplain(r) { +function renderFileExplain(r: FileExplainResult): void { const publicCount = r.publicApi.length; const internalCount = r.internal.length; const lineInfo = r.lineCount ? `${r.lineCount} lines, ` : ''; @@ -267,7 +440,7 @@ function renderFileExplain(r) { console.log(); } -function renderFunctionExplain(r, indent = '') { +function renderFunctionExplain(r: FunctionExplainResult, indent = ''): void { const lineRange = r.endLine ? `${r.line}-${r.endLine}` : `${r.line}`; const lineInfo = r.lineCount ? `${r.lineCount} lines` : ''; const summaryPart = r.summary ? ` | ${r.summary}` : ''; @@ -326,9 +499,9 @@ function renderFunctionExplain(r, indent = '') { console.log(); } -export function explain(target, customDbPath, opts = {}) { - const data = explainData(target, customDbPath, opts); - if (outputResult(data, 'results', opts)) return; +export function explain(target: string, customDbPath: string, opts: OutputOpts = {}): void { + const data = explainData(target, customDbPath, opts as Record) as ExplainData; + if (outputResult(data as unknown as Record, 'results', opts)) return; if (data.results.length === 0) { console.log(`No ${data.kind === 'file' ? 
'file' : 'function/symbol'} matching "${target}"`); @@ -337,18 +510,22 @@ export function explain(target, customDbPath, opts = {}) { if (data.kind === 'file') { for (const r of data.results) { - renderFileExplain(r); + renderFileExplain(r as FileExplainResult); } } else { for (const r of data.results) { - renderFunctionExplain(r); + renderFunctionExplain(r as FunctionExplainResult); } } } -export function implementations(name, customDbPath, opts = {}) { - const data = implementationsData(name, customDbPath, opts); - if (outputResult(data, 'results', opts)) return; +export function implementations(name: string, customDbPath: string, opts: OutputOpts = {}): void { + const data = implementationsData( + name, + customDbPath, + opts as Record, + ) as ImplementationsData; + if (outputResult(data as unknown as Record, 'results', opts)) return; if (data.results.length === 0) { console.log(`No symbol matching "${name}"`); @@ -369,9 +546,13 @@ export function implementations(name, customDbPath, opts = {}) { console.log(); } -export function interfaces(name, customDbPath, opts = {}) { - const data = interfacesData(name, customDbPath, opts); - if (outputResult(data, 'results', opts)) return; +export function interfaces(name: string, customDbPath: string, opts: OutputOpts = {}): void { + const data = interfacesData( + name, + customDbPath, + opts as Record, + ) as InterfacesData; + if (outputResult(data as unknown as Record, 'results', opts)) return; if (data.results.length === 0) { console.log(`No symbol matching "${name}"`); diff --git a/src/presentation/queries-cli/overview.js b/src/presentation/queries-cli/overview.ts similarity index 60% rename from src/presentation/queries-cli/overview.js rename to src/presentation/queries-cli/overview.ts index f4f530b9..6de6f5fa 100644 --- a/src/presentation/queries-cli/overview.js +++ b/src/presentation/queries-cli/overview.ts @@ -2,7 +2,116 @@ import path from 'node:path'; import { kindIcon, moduleMapData, rolesData, statsData } from 
'../../domain/queries.js'; import { outputResult } from '../../infrastructure/result-formatter.js'; -function printCountGrid(entries, padWidth) { +interface OutputOpts { + json?: boolean; + ndjson?: boolean; + csv?: boolean; + table?: boolean; + noTests?: boolean; + role?: string | null; + file?: string; + kind?: string; + limit?: number; + offset?: number; + [key: string]: unknown; +} + +interface StatsNodes { + total: number; + byKind: Record; +} + +interface StatsEdges { + total: number; + byKind: Record; +} + +interface StatsFiles { + total: number; + languages: number; + byLanguage: Record; +} + +interface StatsCycles { + fileLevel: number; + functionLevel: number; +} + +interface Hotspot { + file: string; + fanIn: number; + fanOut: number; +} + +interface EmbeddingsInfo { + count: number; + model?: string; + dim?: number; + builtAt?: string; +} + +interface QualityInfo { + score: number; + callerCoverage: { ratio: number; covered: number; total: number }; + callConfidence: { ratio: number; highConf: number; total: number }; + falsePositiveWarnings: { name: string; callerCount: number; file: string; line: number }[]; +} + +interface ComplexityInfo { + analyzed: number; + avgCognitive: number; + avgCyclomatic: number; + maxCognitive: number; + avgMI?: number; + minMI?: number; +} + +interface CommunityInfo { + communityCount: number; + modularity: number; + driftScore: number; +} + +interface StatsData { + nodes: StatsNodes; + edges: StatsEdges; + files: StatsFiles; + cycles: StatsCycles; + hotspots: Hotspot[]; + embeddings?: EmbeddingsInfo; + quality?: QualityInfo; + roles?: Record; + complexity?: ComplexityInfo; + communities?: CommunityInfo; +} + +interface TopNode { + dir: string; + file: string; + inEdges: number; + outEdges: number; +} + +interface ModuleMapData { + topNodes: TopNode[]; + stats: { totalFiles: number; totalNodes: number; totalEdges: number }; +} + +interface RoleSymbol { + kind: string; + name: string; + file: string; + line: number; + 
role: string; +} + +interface RolesData { + count: number; + summary: Record; + symbols: RoleSymbol[]; +} + +function printCountGrid(entries: [string, number][], padWidth: number): void { const parts = entries.map(([k, v]) => `${k} ${v}`); for (let i = 0; i < parts.length; i += 3) { const row = parts @@ -13,35 +122,44 @@ function printCountGrid(entries, padWidth) { } } -function printNodes(data) { +function printNodes(data: StatsData): void { console.log(`Nodes: ${data.nodes.total} total`); - const kindEntries = Object.entries(data.nodes.byKind).sort((a, b) => b[1] - a[1]); + const kindEntries = Object.entries(data.nodes.byKind).sort((a, b) => b[1] - a[1]) as [ + string, + number, + ][]; printCountGrid(kindEntries, 18); } -function printEdges(data) { +function printEdges(data: StatsData): void { console.log(`\nEdges: ${data.edges.total} total`); - const edgeEntries = Object.entries(data.edges.byKind).sort((a, b) => b[1] - a[1]); + const edgeEntries = Object.entries(data.edges.byKind).sort((a, b) => b[1] - a[1]) as [ + string, + number, + ][]; printCountGrid(edgeEntries, 18); } -function printFiles(data) { +function printFiles(data: StatsData): void { console.log(`\nFiles: ${data.files.total} (${data.files.languages} languages)`); - const langEntries = Object.entries(data.files.byLanguage).sort((a, b) => b[1] - a[1]); + const langEntries = Object.entries(data.files.byLanguage).sort((a, b) => b[1] - a[1]) as [ + string, + number, + ][]; printCountGrid(langEntries, 18); } -function printCycles(data) { +function printCycles(data: StatsData): void { console.log( `\nCycles: ${data.cycles.fileLevel} file-level, ${data.cycles.functionLevel} function-level`, ); } -function printHotspots(data) { +function printHotspots(data: StatsData): void { if (data.hotspots.length > 0) { console.log(`\nTop ${data.hotspots.length} coupling hotspots:`); for (let i = 0; i < data.hotspots.length; i++) { - const h = data.hotspots[i]; + const h = data.hotspots[i]!; console.log( ` ${String(i + 
1).padStart(2)}. ${h.file.padEnd(35)} fan-in: ${String(h.fanIn).padStart(3)} fan-out: ${String(h.fanOut).padStart(3)}`, ); @@ -49,7 +167,7 @@ function printHotspots(data) { } } -function printEmbeddings(data) { +function printEmbeddings(data: StatsData): void { if (data.embeddings) { const e = data.embeddings; console.log( @@ -60,7 +178,7 @@ function printEmbeddings(data) { } } -function printQuality(data) { +function printQuality(data: StatsData): void { if (data.quality) { const q = data.quality; const cc = q.callerCoverage; @@ -81,16 +199,19 @@ function printQuality(data) { } } -function printRoles(data) { +function printRoles(data: StatsData): void { if (data.roles && Object.keys(data.roles).length > 0) { const total = Object.values(data.roles).reduce((a, b) => a + b, 0); console.log(`\nRoles: ${total} classified symbols`); - const roleEntries = Object.entries(data.roles).sort((a, b) => b[1] - a[1]); + const roleEntries = Object.entries(data.roles).sort((a, b) => b[1] - a[1]) as [ + string, + number, + ][]; printCountGrid(roleEntries, 18); } } -function printComplexity(data) { +function printComplexity(data: StatsData): void { if (data.complexity) { const cx = data.complexity; const miPart = cx.avgMI != null ? 
` | avg MI: ${cx.avgMI} | min MI: ${cx.minMI}` : ''; @@ -100,7 +221,7 @@ function printComplexity(data) { } } -function printCommunities(data) { +function printCommunities(data: StatsData): void { if (data.communities) { const cm = data.communities; console.log( @@ -109,8 +230,8 @@ function printCommunities(data) { } } -export async function stats(customDbPath, opts = {}) { - const data = statsData(customDbPath, { noTests: opts.noTests }); +export async function stats(customDbPath: string, opts: OutputOpts = {}): Promise { + const data = statsData(customDbPath, { noTests: opts.noTests }) as StatsData; try { const { communitySummaryForStats } = await import('../../features/communities.js'); @@ -119,7 +240,7 @@ export async function stats(customDbPath, opts = {}) { /* community detection is optional; silently skip on any error */ } - if (outputResult(data, null, opts)) return; + if (outputResult(data as unknown as Record, null, opts)) return; console.log('\n# Codegraph Stats\n'); printNodes(data); @@ -135,15 +256,15 @@ export async function stats(customDbPath, opts = {}) { console.log(); } -export function moduleMap(customDbPath, limit = 20, opts = {}) { - const data = moduleMapData(customDbPath, limit, { noTests: opts.noTests }); - if (outputResult(data, 'topNodes', opts)) return; +export function moduleMap(customDbPath: string, limit = 20, opts: OutputOpts = {}): void { + const data = moduleMapData(customDbPath, limit, { noTests: opts.noTests }) as ModuleMapData; + if (outputResult(data as unknown as Record, 'topNodes', opts)) return; console.log(`\nModule map (top ${limit} most-connected nodes):\n`); - const dirs = new Map(); + const dirs = new Map(); for (const n of data.topNodes) { if (!dirs.has(n.dir)) dirs.set(n.dir, []); - dirs.get(n.dir).push(n); + dirs.get(n.dir)!.push(n); } for (const [dir, files] of [...dirs].sort()) { console.log(` [${dir}/]`); @@ -160,9 +281,9 @@ export function moduleMap(customDbPath, limit = 20, opts = {}) { ); } -export function 
roles(customDbPath, opts = {}) { - const data = rolesData(customDbPath, opts); - if (outputResult(data, 'symbols', opts)) return; +export function roles(customDbPath: string, opts: OutputOpts = {}): void { + const data = rolesData(customDbPath, opts) as RolesData; + if (outputResult(data as unknown as Record, 'symbols', opts)) return; if (data.count === 0) { console.log('No classified symbols found. Run "codegraph build" first.'); @@ -177,10 +298,10 @@ export function roles(customDbPath, opts = {}) { .map(([role, count]) => `${role}: ${count}`); console.log(` ${summaryParts.join(' ')}\n`); - const byRole = {}; + const byRole: Record = {}; for (const s of data.symbols) { if (!byRole[s.role]) byRole[s.role] = []; - byRole[s.role].push(s); + byRole[s.role]!.push(s); } for (const [role, symbols] of Object.entries(byRole)) { diff --git a/src/presentation/queries-cli/path.js b/src/presentation/queries-cli/path.ts similarity index 54% rename from src/presentation/queries-cli/path.js rename to src/presentation/queries-cli/path.ts index 9d61b1a6..befcc849 100644 --- a/src/presentation/queries-cli/path.js +++ b/src/presentation/queries-cli/path.ts @@ -1,24 +1,63 @@ import { kindIcon, pathData } from '../../domain/queries.js'; import { outputResult } from '../../infrastructure/result-formatter.js'; -function printNotFound(from, to, data) { +interface PathCandidate { + name: string; + file: string; + line: number; +} + +interface PathStep { + kind: string; + name: string; + file: string; + line: number; + edgeKind?: string; +} + +interface PathDataResult { + error?: string; + found?: boolean; + hops?: number; + reverse?: boolean; + maxDepth?: number; + path: PathStep[]; + fromCandidates: PathCandidate[]; + toCandidates: PathCandidate[]; + alternateCount: number; +} + +interface PathOpts { + json?: boolean; + ndjson?: boolean; + csv?: boolean; + table?: boolean; + noTests?: boolean; + reverse?: boolean; + maxDepth?: number; + file?: string; + kind?: string; + [key: string]: 
unknown; +} + +function printNotFound(from: string, to: string, data: PathDataResult): void { const dir = data.reverse ? 'reverse ' : ''; console.log(`No ${dir}path from "${from}" to "${to}" within ${data.maxDepth} hops.`); if (data.fromCandidates.length > 1) { console.log( - `\n "${from}" matched ${data.fromCandidates.length} symbols — using top match: ${data.fromCandidates[0].name} (${data.fromCandidates[0].file}:${data.fromCandidates[0].line})`, + `\n "${from}" matched ${data.fromCandidates.length} symbols — using top match: ${data.fromCandidates[0]!.name} (${data.fromCandidates[0]!.file}:${data.fromCandidates[0]!.line})`, ); } if (data.toCandidates.length > 1) { console.log( - ` "${to}" matched ${data.toCandidates.length} symbols — using top match: ${data.toCandidates[0].name} (${data.toCandidates[0].file}:${data.toCandidates[0].line})`, + ` "${to}" matched ${data.toCandidates.length} symbols — using top match: ${data.toCandidates[0]!.name} (${data.toCandidates[0]!.file}:${data.toCandidates[0]!.line})`, ); } } -function printPathSteps(data) { +function printPathSteps(data: PathDataResult): void { for (let i = 0; i < data.path.length; i++) { - const n = data.path[i]; + const n = data.path[i]!; const indent = ' '.repeat(i + 1); if (i === 0) { console.log(`${indent}${kindIcon(n.kind)} ${n.name} (${n.kind}) -- ${n.file}:${n.line}`); @@ -35,9 +74,14 @@ function printPathSteps(data) { } } -export function symbolPath(from, to, customDbPath, opts = {}) { - const data = pathData(from, to, customDbPath, opts); - if (outputResult(data, null, opts)) return; +export function symbolPath( + from: string, + to: string, + customDbPath: string, + opts: PathOpts = {}, +): void { + const data = pathData(from, to, customDbPath, opts) as PathDataResult; + if (outputResult(data as unknown as Record, null, opts)) return; if (data.error) { console.log(data.error); @@ -51,7 +95,7 @@ export function symbolPath(from, to, customDbPath, opts = {}) { if (data.hops === 0) { 
console.log(`\n"${from}" and "${to}" resolve to the same symbol (0 hops):`); - const n = data.path[0]; + const n = data.path[0]!; console.log(` ${kindIcon(n.kind)} ${n.name} (${n.kind}) -- ${n.file}:${n.line}\n`); return; } diff --git a/src/presentation/query.js b/src/presentation/query.ts similarity index 62% rename from src/presentation/query.js rename to src/presentation/query.ts index 72a5bef0..f9cbf120 100644 --- a/src/presentation/query.js +++ b/src/presentation/query.ts @@ -1,7 +1,3 @@ -/** - * Re-export all query CLI wrappers from queries-cli.js. - * This barrel file provides the standard src/commands/ import path. - */ export { children, context, diff --git a/src/presentation/result-formatter.js b/src/presentation/result-formatter.ts similarity index 57% rename from src/presentation/result-formatter.js rename to src/presentation/result-formatter.ts index b3fc843d..bc5ffcba 100644 --- a/src/presentation/result-formatter.js +++ b/src/presentation/result-formatter.ts @@ -2,16 +2,8 @@ import { loadConfig } from '../infrastructure/config.js'; import { printNdjson } from '../shared/paginate.js'; import { formatTable, truncEnd } from './table.js'; -/** - * Flatten a nested object into dot-notation keys. - * Arrays are JSON-stringified; nested objects are recursed. - * - * Note: this assumes input objects do not contain literal dot-notation keys - * (e.g. `{ "a.b": 1 }`). If they do, flattened keys will silently collide - * with nested paths (e.g. `{ a: { b: 2 } }` also produces `"a.b"`). - */ -function flattenObject(obj, prefix = '') { - const result = {}; +function flattenObject(obj: Record, prefix = ''): Record { + const result: Record = {}; for (const [key, value] of Object.entries(obj)) { const fullKey = prefix ? 
`${prefix}.${key}` : key; if ( @@ -20,7 +12,7 @@ function flattenObject(obj, prefix = '') { !Array.isArray(value) && Object.getPrototypeOf(value) === Object.prototype ) { - Object.assign(result, flattenObject(value, fullKey)); + Object.assign(result, flattenObject(value as Record, fullKey)); } else if (Array.isArray(value)) { result[fullKey] = JSON.stringify(value); } else { @@ -30,22 +22,20 @@ function flattenObject(obj, prefix = '') { return result; } -/** - * Flatten items array and derive column names. - * Shared by printCsv and printAutoTable. - * @returns {{ flatItems: object[], columns: string[] } | null} - */ -function prepareFlatItems(data, field) { - const items = field ? data[field] : data; +function prepareFlatItems( + data: Record, + field: string | null, +): { flatItems: Record[]; columns: string[] } | null { + const items = field ? (data[field] as unknown) : data; if (!Array.isArray(items)) return null; - const flatItems = items.map((item) => + const flatItems = items.map((item: unknown) => typeof item === 'object' && item !== null && !Array.isArray(item) - ? flattenObject(item) + ? flattenObject(item as Record) : { value: item }, ); const columns = (() => { - const keys = new Set(); + const keys = new Set(); for (const item of flatItems) for (const key of Object.keys(item)) keys.add(key); return [...keys]; })(); @@ -54,8 +44,7 @@ function prepareFlatItems(data, field) { return { flatItems, columns }; } -/** Escape a value for CSV output (LF line endings). */ -function escapeCsv(val) { +function escapeCsv(val: unknown): string { const str = val == null ? '' : String(val); if (str.includes(',') || str.includes('"') || str.includes('\n') || str.includes('\r')) { return `"${str.replace(/"/g, '""')}"`; @@ -63,12 +52,7 @@ function escapeCsv(val) { return str; } -/** - * Print data as CSV to stdout. - * @param {object} data - Result object from a *Data() function - * @param {string} field - Array field name (e.g. 
'results') - */ -function printCsv(data, field) { +function printCsv(data: Record, field: string | null): boolean { const prepared = prepareFlatItems(data, field); if (!prepared) return false; const { flatItems, columns } = prepared; @@ -82,13 +66,15 @@ function printCsv(data, field) { const MAX_COL_WIDTH = 40; -/** - * Print data as an aligned table to stdout. - * @param {object} data - Result object from a *Data() function - * @param {string} field - Array field name (e.g. 'results') - * @param {{ maxColWidth?: number }} [displayOpts] - */ -function printAutoTable(data, field, displayOpts = {}) { +interface DisplayOpts { + maxColWidth?: number; +} + +function printAutoTable( + data: Record, + field: string | null, + displayOpts: DisplayOpts = {}, +): boolean { const prepared = prepareFlatItems(data, field); if (!prepared) return false; const { flatItems, columns } = prepared; @@ -108,7 +94,7 @@ function printAutoTable(data, field, displayOpts = {}) { return { header: col, width: Math.min(maxLen, colWidth), - align: isNumeric ? 'right' : 'left', + align: isNumeric ? ('right' as const) : ('left' as const), }; }); @@ -120,17 +106,22 @@ function printAutoTable(data, field, displayOpts = {}) { return true; } -/** - * Shared JSON / NDJSON / table / CSV output dispatch for CLI wrappers. - * - * @param {object} data - Result object from a *Data() function - * @param {string} field - Array field name for NDJSON streaming (e.g. 'results') - * @param {object} opts - CLI options ({ json?, ndjson?, table?, csv?, display?: { maxColWidth? 
} }) - * @returns {boolean} true if output was handled (caller should return early) - */ -export function outputResult(data, field, opts) { +export interface OutputOpts { + json?: boolean; + ndjson?: boolean; + table?: boolean; + csv?: boolean; + display?: DisplayOpts; +} + +export function outputResult( + // biome-ignore lint/suspicious/noExplicitAny: accepts any data shape from features + data: Record, + field: string | null, + opts: OutputOpts, +): boolean { if (opts.ndjson) { - printNdjson(data, field); + printNdjson(data, field as string); return true; } if (opts.json) { @@ -141,7 +132,7 @@ export function outputResult(data, field, opts) { return printCsv(data, field) !== false; } if (opts.table) { - const displayOpts = opts.display ?? loadConfig().display; + const displayOpts = opts.display ?? (loadConfig() as { display: DisplayOpts }).display; return printAutoTable(data, field, displayOpts) !== false; } return false; diff --git a/src/presentation/sequence-renderer.js b/src/presentation/sequence-renderer.js deleted file mode 100644 index 8913230b..00000000 --- a/src/presentation/sequence-renderer.js +++ /dev/null @@ -1,43 +0,0 @@ -/** - * Mermaid sequence diagram renderer — pure data → string transform. - * - * Converts sequenceData() output into Mermaid sequenceDiagram syntax. - * No DB access, no I/O — just data in, formatted string out. - */ - -/** - * Escape special Mermaid characters in labels. - */ -function escapeMermaid(str) { - return str - .replace(//g, '>') - .replace(/:/g, '#colon;') - .replace(/"/g, '#quot;'); -} - -/** - * Convert sequenceData result to Mermaid sequenceDiagram syntax. 
- * @param {{ participants, messages, truncated, depth }} seqResult - * @returns {string} - */ -export function sequenceToMermaid(seqResult) { - const lines = ['sequenceDiagram']; - - for (const p of seqResult.participants) { - lines.push(` participant ${p.id} as ${escapeMermaid(p.label)}`); - } - - for (const msg of seqResult.messages) { - const arrow = msg.type === 'return' ? '-->>' : '->>'; - lines.push(` ${msg.from}${arrow}${msg.to}: ${escapeMermaid(msg.label)}`); - } - - if (seqResult.truncated && seqResult.participants.length > 0) { - lines.push( - ` note right of ${seqResult.participants[0].id}: Truncated at depth ${seqResult.depth}`, - ); - } - - return lines.join('\n'); -} diff --git a/src/presentation/sequence-renderer.ts b/src/presentation/sequence-renderer.ts new file mode 100644 index 00000000..9ced4914 --- /dev/null +++ b/src/presentation/sequence-renderer.ts @@ -0,0 +1,46 @@ +interface Participant { + id: string; + label: string; +} + +interface SequenceMessage { + from: string; + to: string; + type: string; + label: string; +} + +interface SequenceRenderData { + participants: Participant[]; + messages: SequenceMessage[]; + truncated: boolean; + depth: number; +} + +function escapeMermaid(str: string): string { + return str + .replace(//g, '>') + .replace(/:/g, '#colon;') + .replace(/"/g, '#quot;'); +} + +export function sequenceToMermaid(seqResult: SequenceRenderData): string { + const lines = ['sequenceDiagram']; + + for (const p of seqResult.participants) { + lines.push(` participant ${p.id} as ${escapeMermaid(p.label)}`); + } + + for (const msg of seqResult.messages) { + const arrow = msg.type === 'return' ? 
'-->>' : '->>'; + lines.push(` ${msg.from}${arrow}${msg.to}: ${escapeMermaid(msg.label)}`); + } + + if (seqResult.truncated && seqResult.participants.length > 0) { + const firstParticipant = seqResult.participants[0]!; + lines.push(` note right of ${firstParticipant.id}: Truncated at depth ${seqResult.depth}`); + } + + return lines.join('\n'); +} diff --git a/src/presentation/sequence.js b/src/presentation/sequence.ts similarity index 64% rename from src/presentation/sequence.js rename to src/presentation/sequence.ts index ff8946c0..d07808a9 100644 --- a/src/presentation/sequence.js +++ b/src/presentation/sequence.ts @@ -2,11 +2,22 @@ import { kindIcon } from '../domain/queries.js'; import { sequenceData, sequenceToMermaid } from '../features/sequence.js'; import { outputResult } from '../infrastructure/result-formatter.js'; -/** - * CLI entry point — format sequence data as mermaid, JSON, or ndjson. - */ -export function sequence(name, dbPath, opts = {}) { - const data = sequenceData(name, dbPath, opts); +interface SequenceOpts { + json?: boolean; + ndjson?: boolean; + table?: boolean; + csv?: boolean; + depth?: number; + noTests?: boolean; + file?: string; + kind?: string; + limit?: number; + offset?: number; +} + +export function sequence(name: string, dbPath: string | undefined, opts: SequenceOpts = {}): void { + // biome-ignore lint/suspicious/noExplicitAny: dynamic shape from sequenceData + const data = sequenceData(name, dbPath, opts) as any; if (outputResult(data, 'messages', opts)) return; diff --git a/src/presentation/structure.js b/src/presentation/structure.ts similarity index 63% rename from src/presentation/structure.js rename to src/presentation/structure.ts index a2679a4f..8d0ba6b6 100644 --- a/src/presentation/structure.js +++ b/src/presentation/structure.ts @@ -3,7 +3,29 @@ import { hotspotsData, moduleBoundariesData, structureData } from '../features/s export { hotspotsData, moduleBoundariesData, structureData }; -export function 
formatStructure(data) { +interface DirectoryEntry { + directory: string; + cohesion: number | null; + fileCount: number; + symbolCount: number; + fanIn: number; + fanOut: number; + files: Array<{ + file: string; + lineCount: number; + symbolCount: number; + fanIn: number; + fanOut: number; + }>; +} + +interface StructureResult { + count: number; + directories: DirectoryEntry[]; + warning?: string; +} + +export function formatStructure(data: StructureResult): string { if (data.count === 0) return 'No directory structure found. Run "codegraph build" first.'; const lines = [`\nProject structure (${data.count} directories):\n`]; @@ -22,12 +44,30 @@ export function formatStructure(data) { } if (data.warning) { lines.push(''); - lines.push(`⚠ ${data.warning}`); + lines.push(`\u26A0 ${data.warning}`); } return lines.join('\n'); } -export function formatHotspots(data) { +interface HotspotEntry { + name: string; + kind: string; + fileCount?: number; + cohesion?: number | null; + lineCount?: number; + symbolCount?: number; + fanIn?: number; + fanOut?: number; +} + +interface HotspotsResult { + metric: string; + level: string; + limit: number; + hotspots: HotspotEntry[]; +} + +export function formatHotspots(data: HotspotsResult): string { if (data.hotspots.length === 0) return 'No hotspots found. Run "codegraph build" first.'; const lines = [`\nHotspots by ${data.metric} (${data.level}-level, top ${data.limit}):\n`]; @@ -35,7 +75,7 @@ export function formatHotspots(data) { for (const h of data.hotspots) { const extra = h.kind === 'directory' - ? `${h.fileCount} files, cohesion=${h.cohesion !== null ? h.cohesion.toFixed(2) : 'n/a'}` + ? `${h.fileCount} files, cohesion=${h.cohesion !== null ? h.cohesion!.toFixed(2) : 'n/a'}` : `${h.lineCount || 0}L, ${h.symbolCount || 0} symbols`; lines.push( ` ${String(rank++).padStart(2)}. 
${h.name} <-${h.fanIn || 0} ->${h.fanOut || 0} (${extra})`, @@ -44,7 +84,23 @@ export function formatHotspots(data) { return lines.join('\n'); } -export function formatModuleBoundaries(data) { +interface ModuleBoundaryEntry { + directory: string; + cohesion: number; + fileCount: number; + symbolCount: number; + fanIn: number; + fanOut: number; + files: string[]; +} + +interface ModuleBoundariesResult { + threshold: number; + count: number; + modules: ModuleBoundaryEntry[]; +} + +export function formatModuleBoundaries(data: ModuleBoundariesResult): string { if (data.count === 0) return `No modules found with cohesion >= ${data.threshold}.`; const lines = [`\nModule boundaries (cohesion >= ${data.threshold}, ${data.count} modules):\n`]; diff --git a/src/presentation/triage.js b/src/presentation/triage.ts similarity index 58% rename from src/presentation/triage.js rename to src/presentation/triage.ts index 8c10cb03..da86d6ca 100644 --- a/src/presentation/triage.js +++ b/src/presentation/triage.ts @@ -1,13 +1,48 @@ import { triageData } from '../features/triage.js'; import { outputResult } from '../infrastructure/result-formatter.js'; -/** - * Print triage results to console. 
- */ -export function triage(customDbPath, opts = {}) { - const data = triageData(customDbPath, opts); +interface TriageOpts { + json?: boolean; + ndjson?: boolean; + table?: boolean; + csv?: boolean; + noTests?: boolean; + sort?: string; + kind?: string; + role?: string; + file?: string; + minScore?: number; + limit?: number; + offset?: number; + config?: unknown; +} + +interface TriageItem { + name: string; + file: string; + role: string | null; + riskScore: number; + fanIn: number; + cognitive: number; + churn: number; + maintainabilityIndex: number; +} + +interface TriageSummary { + total: number; + analyzed: number; + avgScore: number; + maxScore: number; +} + +export function triage(customDbPath: string | undefined, opts: TriageOpts = {}): void { + // biome-ignore lint/suspicious/noExplicitAny: dynamic shape from triageData + const data = triageData(customDbPath, opts as any) as { + items: TriageItem[]; + summary: TriageSummary; + }; - if (outputResult(data, 'items', opts)) return; + if (outputResult(data as unknown as Record, 'items', opts)) return; if (data.items.length === 0) { if (data.summary.total === 0) { @@ -28,8 +63,8 @@ export function triage(customDbPath, opts = {}) { ); for (const it of data.items) { - const name = it.name.length > 33 ? `${it.name.slice(0, 32)}…` : it.name; - const file = it.file.length > 26 ? `…${it.file.slice(-25)}` : it.file; + const name = it.name.length > 33 ? `${it.name.slice(0, 32)}\u2026` : it.name; + const file = it.file.length > 26 ? 
`\u2026${it.file.slice(-25)}` : it.file; const role = (it.role || '-').padEnd(8); const score = it.riskScore.toFixed(2).padStart(6); const fanIn = String(it.fanIn).padStart(7); diff --git a/src/presentation/viewer.js b/src/presentation/viewer.ts similarity index 85% rename from src/presentation/viewer.js rename to src/presentation/viewer.ts index b39f9448..4ed0f1bb 100644 --- a/src/presentation/viewer.js +++ b/src/presentation/viewer.ts @@ -1,17 +1,3 @@ -/** - * Interactive HTML viewer — presentation layer. - * - * Exports two concerns: - * - renderPlotHTML(): pure data → HTML transform (no I/O) that receives - * prepared graph data and config, returns a self-contained HTML string - * with vis-network. All graph data must be pre-loaded via prepareGraphData(). - * - loadPlotConfig(): reads .plotDotCfg / .plotDotCfg.json files from disk - * and merges them with defaults. This performs filesystem I/O. - * - * Color constants are defined in ./colors.js and re-exported here for - * backward compatibility. 
- */ - import fs from 'node:fs'; import path from 'node:path'; import { COMMUNITY_COLORS, DEFAULT_NODE_COLORS, DEFAULT_ROLE_COLORS } from './colors.js'; @@ -19,7 +5,24 @@ import { COMMUNITY_COLORS, DEFAULT_NODE_COLORS, DEFAULT_ROLE_COLORS } from './co // Re-export color constants so existing consumers are unaffected export { COMMUNITY_COLORS, DEFAULT_NODE_COLORS, DEFAULT_ROLE_COLORS }; -export const DEFAULT_CONFIG = { +export interface PlotConfig { + layout?: { algorithm?: string; direction?: string }; + physics?: { enabled?: boolean; nodeDistance?: number }; + nodeColors?: Record; + roleColors?: Record; + colorBy?: string; + edgeStyle?: { color?: string; smooth?: boolean }; + filter?: { kinds?: string[] | null; roles?: string[] | null; files?: string[] | null }; + title?: string; + seedStrategy?: string; + seedCount?: number; + clusterBy?: string; + sizeBy?: string; + overlays?: { complexity?: boolean; risk?: boolean }; + riskThresholds?: { highBlastRadius?: number; lowMI?: number }; +} + +export const DEFAULT_CONFIG: PlotConfig = { layout: { algorithm: 'hierarchical', direction: 'LR' }, physics: { enabled: true, nodeDistance: 150 }, nodeColors: DEFAULT_NODE_COLORS, @@ -38,16 +41,12 @@ export const DEFAULT_CONFIG = { // ─── Config Loading ────────────────────────────────────────────────── -/** - * Load .plotDotCfg or .plotDotCfg.json from given directory. - * Returns merged config with defaults. 
- */ -export function loadPlotConfig(dir) { +export function loadPlotConfig(dir: string): PlotConfig { for (const name of ['.plotDotCfg', '.plotDotCfg.json']) { const p = path.join(dir, name); if (fs.existsSync(p)) { try { - const raw = JSON.parse(fs.readFileSync(p, 'utf-8')); + const raw = JSON.parse(fs.readFileSync(p, 'utf-8')) as Partial; return { ...DEFAULT_CONFIG, ...raw, @@ -85,7 +84,7 @@ export function loadPlotConfig(dir) { // ─── Internal Helpers ──────────────────────────────────────────────── -export function escapeHtml(s) { +export function escapeHtml(s: string): string { return String(s) .replace(/&/g, '&') .replace(/ - + @@ -581,7 +623,7 @@ network.on('doubleClick', function(params) { document.getElementById('layoutSelect').addEventListener('change', function(e) { var val = e.target.value; if (val === 'hierarchical') { - network.setOptions({ layout: { hierarchical: { enabled: true, direction: ${JSON.stringify(cfg.layout.direction || 'LR')} } }, physics: { enabled: document.getElementById('physicsToggle').checked } }); + network.setOptions({ layout: { hierarchical: { enabled: true, direction: ${JSON.stringify(layoutDirection)} } }, physics: { enabled: document.getElementById('physicsToggle').checked } }); } else if (val === 'radial') { network.setOptions({ layout: { hierarchical: false, improvedLayout: true }, physics: { enabled: true, solver: 'repulsion', repulsion: { nodeDistance: 200 } } }); } else { @@ -627,7 +669,7 @@ document.getElementById('detailClose').addEventListener('click', hideDetail); /* ── Init ──────────────────────────────────────────────────────────── */ refreshNodeAppearance(); updateLegend(${JSON.stringify(effectiveColorBy)}); -${(cfg.clusterBy || 'none') !== 'none' ? `applyClusterBy(${JSON.stringify(cfg.clusterBy)});` : ''} +${clusterBy !== 'none' ? 
`applyClusterBy(${JSON.stringify(clusterBy)});` : ''} `; From f6a3d706f9412528f1d8b8fb3aece04753187018 Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Mon, 23 Mar 2026 23:43:40 -0600 Subject: [PATCH 04/26] fix(types): delete stale .js counterparts and fix post-migration compilation issues Delete 13 stale .js files replaced by TypeScript in Phase 6a-6c commits. Fix type errors in presentation/, features/, mcp/, ast-analysis/ modules. Migrate src/index.js to TypeScript. Update integration tests for CLI changes. Impact: 40 functions changed, 8 affected --- src/ast-analysis/engine.ts | 4 +- src/domain/graph/builder.js | 11 - src/domain/graph/builder/context.js | 85 --- src/domain/graph/builder/helpers.js | 218 ------ src/domain/graph/builder/pipeline.js | 164 ----- src/domain/graph/watcher.js | 165 ----- src/domain/search/generator.js | 163 ----- src/domain/search/index.js | 13 - src/domain/search/models.js | 218 ------ src/domain/search/search/cli-formatter.ts | 2 + src/features/boundaries.ts | 2 +- src/features/export.js | 378 ---------- src/index.ts | 67 ++ src/mcp/index.js | 2 - src/mcp/middleware.js | 44 -- src/mcp/server.js | 143 ---- src/mcp/server.ts | 11 +- src/mcp/tool-registry.js | 852 ---------------------- src/presentation/audit.ts | 4 +- src/presentation/branch-compare.ts | 2 + src/presentation/check.ts | 8 +- src/presentation/complexity.ts | 3 +- src/presentation/owners.ts | 5 +- src/presentation/structure.ts | 3 +- src/presentation/triage.ts | 1 + src/types.ts | 35 + tests/integration/batch.test.js | 28 +- tests/integration/cli.test.js | 9 +- 28 files changed, 165 insertions(+), 2475 deletions(-) delete mode 100644 src/domain/graph/builder.js delete mode 100644 src/domain/graph/builder/context.js delete mode 100644 src/domain/graph/builder/helpers.js delete mode 100644 src/domain/graph/builder/pipeline.js delete mode 100644 src/domain/graph/watcher.js delete mode 100644 src/domain/search/generator.js 
delete mode 100644 src/domain/search/index.js delete mode 100644 src/domain/search/models.js delete mode 100644 src/features/export.js create mode 100644 src/index.ts delete mode 100644 src/mcp/index.js delete mode 100644 src/mcp/middleware.js delete mode 100644 src/mcp/server.js delete mode 100644 src/mcp/tool-registry.js diff --git a/src/ast-analysis/engine.ts b/src/ast-analysis/engine.ts index 050b0b5d..1cea727e 100644 --- a/src/ast-analysis/engine.ts +++ b/src/ast-analysis/engine.ts @@ -339,7 +339,7 @@ async function delegateToBuildFunctions( const t0 = performance.now(); try { const { buildAstNodes } = await import('../features/ast.js'); - await buildAstNodes(db, fileSymbols, rootDir, engineOpts); + await buildAstNodes(db, fileSymbols as Map, rootDir, engineOpts); } catch (err: unknown) { debug(`buildAstNodes failed: ${(err as Error).message}`); } @@ -350,7 +350,7 @@ async function delegateToBuildFunctions( const t0 = performance.now(); try { const { buildComplexityMetrics } = await import('../features/complexity.js'); - await buildComplexityMetrics(db, fileSymbols, rootDir, engineOpts); + await buildComplexityMetrics(db, fileSymbols as Map, rootDir, engineOpts); } catch (err: unknown) { debug(`buildComplexityMetrics failed: ${(err as Error).message}`); } diff --git a/src/domain/graph/builder.js b/src/domain/graph/builder.js deleted file mode 100644 index 134f1096..00000000 --- a/src/domain/graph/builder.js +++ /dev/null @@ -1,11 +0,0 @@ -// Barrel re-export — keeps all existing `import { ... } from './builder.js'` working. -// See src/builder/ for the pipeline implementation (ROADMAP 3.9). 
- -export { - collectFiles, - loadPathAliases, - purgeFilesFromGraph, - readFileSafe, -} from './builder/helpers.js'; -export { buildGraph } from './builder/pipeline.js'; -export { resolveImportPath } from './resolve.js'; diff --git a/src/domain/graph/builder/context.js b/src/domain/graph/builder/context.js deleted file mode 100644 index 31024d6b..00000000 --- a/src/domain/graph/builder/context.js +++ /dev/null @@ -1,85 +0,0 @@ -/** - * PipelineContext — shared mutable state threaded through all build stages. - * - * Each stage reads what it needs and writes what it produces. - * This replaces the closure-captured locals in the old monolithic buildGraph(). - */ -export class PipelineContext { - // ── Inputs (set during setup) ────────────────────────────────────── - /** @type {string} Absolute root directory */ - rootDir; - /** @type {import('better-sqlite3').Database} */ - db; - /** @type {string} Absolute path to the database file */ - dbPath; - /** @type {object} From loadConfig() */ - config; - /** @type {object} Original buildGraph opts */ - opts; - /** @type {{ engine: string, dataflow: boolean, ast: boolean }} */ - engineOpts; - /** @type {string} 'native' | 'wasm' */ - engineName; - /** @type {string|null} */ - engineVersion; - /** @type {{ baseUrl: string|null, paths: object }} */ - aliases; - /** @type {boolean} Whether incremental mode is enabled */ - incremental; - /** @type {boolean} Force full rebuild (engine/schema mismatch) */ - forceFullRebuild = false; - /** @type {number} Current schema version */ - schemaVersion; - - // ── File collection (set by collectFiles stage) ──────────────────── - /** @type {string[]} Absolute file paths */ - allFiles; - /** @type {Set} Absolute directory paths */ - discoveredDirs; - - // ── Change detection (set by detectChanges stage) ────────────────── - /** @type {boolean} */ - isFullBuild; - /** @type {Array<{ file: string, relPath?: string, content?: string, hash?: string, stat?: object, _reverseDepOnly?: boolean 
}>} */ - parseChanges; - /** @type {Array<{ relPath: string, hash: string, stat: object }>} Metadata-only self-heal updates */ - metadataUpdates; - /** @type {string[]} Relative paths of deleted files */ - removed; - /** @type {boolean} True when no changes detected — skip remaining stages */ - earlyExit = false; - - // ── Parsing (set by parseFiles stage) ────────────────────────────── - /** @type {Map} relPath → symbols from parseFilesAuto */ - allSymbols; - /** @type {Map} relPath → symbols (includes incrementally loaded) */ - fileSymbols; - /** @type {Array<{ file: string, relPath?: string }>} Files to parse this build */ - filesToParse; - - // ── Import resolution (set by resolveImports stage) ──────────────── - /** @type {Map|null} "absFile|source" → resolved path */ - batchResolved; - /** @type {Map} relPath → re-export descriptors */ - reexportMap; - /** @type {Set} Files loaded only for barrel resolution (don't rebuild edges) */ - barrelOnlyFiles; - - // ── Node lookup (set by insertNodes / buildEdges stages) ─────────── - /** @type {Map} name → node rows */ - nodesByName; - /** @type {Map} "name|file" → node rows */ - nodesByNameAndFile; - - // ── Misc state ───────────────────────────────────────────────────── - /** @type {boolean} Whether embeddings table exists */ - hasEmbeddings = false; - /** @type {Map} relPath → line count */ - lineCountMap; - - // ── Phase timing ─────────────────────────────────────────────────── - timing = {}; - - /** @type {number} performance.now() at build start */ - buildStart; -} diff --git a/src/domain/graph/builder/helpers.js b/src/domain/graph/builder/helpers.js deleted file mode 100644 index b7916c84..00000000 --- a/src/domain/graph/builder/helpers.js +++ /dev/null @@ -1,218 +0,0 @@ -/** - * Builder helper functions — shared utilities used across pipeline stages. - * - * Extracted from the monolithic builder.js so stages can import individually. 
- */ -import { createHash } from 'node:crypto'; -import fs from 'node:fs'; -import path from 'node:path'; -import { purgeFilesData } from '../../../db/index.js'; -import { warn } from '../../../infrastructure/logger.js'; -import { EXTENSIONS, IGNORE_DIRS } from '../../../shared/constants.js'; - -export const BUILTIN_RECEIVERS = new Set([ - 'console', - 'Math', - 'JSON', - 'Object', - 'Array', - 'String', - 'Number', - 'Boolean', - 'Date', - 'RegExp', - 'Map', - 'Set', - 'WeakMap', - 'WeakSet', - 'Promise', - 'Symbol', - 'Error', - 'TypeError', - 'RangeError', - 'Proxy', - 'Reflect', - 'Intl', - 'globalThis', - 'window', - 'document', - 'process', - 'Buffer', - 'require', -]); - -/** - * Recursively collect all source files under `dir`. - * When `directories` is a Set, also tracks which directories contain files. - */ -export function collectFiles( - dir, - files = [], - config = {}, - directories = null, - _visited = new Set(), -) { - const trackDirs = directories instanceof Set; - let hasFiles = false; - - // Merge config ignoreDirs with defaults - const extraIgnore = config.ignoreDirs ? new Set(config.ignoreDirs) : null; - - // Detect symlink loops (before I/O to avoid wasted readdirSync) - let realDir; - try { - realDir = fs.realpathSync(dir); - } catch { - return trackDirs ? { files, directories } : files; - } - if (_visited.has(realDir)) { - warn(`Symlink loop detected, skipping: ${dir}`); - return trackDirs ? { files, directories } : files; - } - _visited.add(realDir); - - let entries; - try { - entries = fs.readdirSync(dir, { withFileTypes: true }); - } catch (err) { - warn(`Cannot read directory ${dir}: ${err.message}`); - return trackDirs ? 
{ files, directories } : files; - } - - for (const entry of entries) { - if (entry.name.startsWith('.') && entry.name !== '.') { - if (IGNORE_DIRS.has(entry.name)) continue; - if (entry.isDirectory()) continue; - } - if (IGNORE_DIRS.has(entry.name)) continue; - if (extraIgnore?.has(entry.name)) continue; - - const full = path.join(dir, entry.name); - if (entry.isDirectory()) { - collectFiles(full, files, config, directories, _visited); - } else if (EXTENSIONS.has(path.extname(entry.name))) { - files.push(full); - hasFiles = true; - } - } - if (trackDirs && hasFiles) { - directories.add(dir); - } - return trackDirs ? { files, directories } : files; -} - -/** - * Load path aliases from tsconfig.json / jsconfig.json. - */ -export function loadPathAliases(rootDir) { - const aliases = { baseUrl: null, paths: {} }; - for (const configName of ['tsconfig.json', 'jsconfig.json']) { - const configPath = path.join(rootDir, configName); - if (!fs.existsSync(configPath)) continue; - try { - const raw = fs - .readFileSync(configPath, 'utf-8') - .replace(/\/\/.*$/gm, '') - .replace(/\/\*[\s\S]*?\*\//g, '') - .replace(/,\s*([\]}])/g, '$1'); - const config = JSON.parse(raw); - const opts = config.compilerOptions || {}; - if (opts.baseUrl) aliases.baseUrl = path.resolve(rootDir, opts.baseUrl); - if (opts.paths) { - for (const [pattern, targets] of Object.entries(opts.paths)) { - aliases.paths[pattern] = targets.map((t) => path.resolve(aliases.baseUrl || rootDir, t)); - } - } - break; - } catch (err) { - warn(`Failed to parse ${configName}: ${err.message}`); - } - } - return aliases; -} - -/** - * Compute MD5 hash of file contents for incremental builds. - */ -export function fileHash(content) { - return createHash('md5').update(content).digest('hex'); -} - -/** - * Stat a file, returning { mtimeMs, size } or null on error. 
- */ -export function fileStat(filePath) { - try { - const s = fs.statSync(filePath); - return { mtimeMs: s.mtimeMs, size: s.size }; - } catch { - return null; - } -} - -/** - * Read a file with retry on transient errors (EBUSY/EACCES/EPERM). - */ -const TRANSIENT_CODES = new Set(['EBUSY', 'EACCES', 'EPERM']); -const RETRY_DELAY_MS = 50; - -export function readFileSafe(filePath, retries = 2) { - for (let attempt = 0; ; attempt++) { - try { - return fs.readFileSync(filePath, 'utf-8'); - } catch (err) { - if (attempt < retries && TRANSIENT_CODES.has(err.code)) { - const end = Date.now() + RETRY_DELAY_MS; - while (Date.now() < end) {} - continue; - } - throw err; - } - } -} - -/** - * Purge all graph data for the specified files. - */ -export function purgeFilesFromGraph(db, files, options = {}) { - purgeFilesData(db, files, options); -} - -/** Batch INSERT chunk size for multi-value INSERTs. */ -const BATCH_CHUNK = 200; - -/** - * Batch-insert node rows via multi-value INSERT statements. - * Each row: [name, kind, file, line, end_line, parent_id, qualified_name, scope, visibility] - */ -export function batchInsertNodes(db, rows) { - if (!rows.length) return; - const ph = '(?,?,?,?,?,?,?,?,?)'; - for (let i = 0; i < rows.length; i += BATCH_CHUNK) { - const chunk = rows.slice(i, i + BATCH_CHUNK); - const vals = []; - for (const r of chunk) vals.push(r[0], r[1], r[2], r[3], r[4], r[5], r[6], r[7], r[8]); - db.prepare( - 'INSERT OR IGNORE INTO nodes (name,kind,file,line,end_line,parent_id,qualified_name,scope,visibility) VALUES ' + - chunk.map(() => ph).join(','), - ).run(...vals); - } -} - -/** - * Batch-insert edge rows via multi-value INSERT statements. 
- * Each row: [source_id, target_id, kind, confidence, dynamic] - */ -export function batchInsertEdges(db, rows) { - if (!rows.length) return; - const ph = '(?,?,?,?,?)'; - for (let i = 0; i < rows.length; i += BATCH_CHUNK) { - const chunk = rows.slice(i, i + BATCH_CHUNK); - const vals = []; - for (const r of chunk) vals.push(r[0], r[1], r[2], r[3], r[4]); - db.prepare( - 'INSERT INTO edges (source_id,target_id,kind,confidence,dynamic) VALUES ' + - chunk.map(() => ph).join(','), - ).run(...vals); - } -} diff --git a/src/domain/graph/builder/pipeline.js b/src/domain/graph/builder/pipeline.js deleted file mode 100644 index 0343e255..00000000 --- a/src/domain/graph/builder/pipeline.js +++ /dev/null @@ -1,164 +0,0 @@ -/** - * Pipeline orchestrator — runs build stages sequentially through a shared PipelineContext. - * - * This is the heart of the builder refactor (ROADMAP 3.9): the monolithic buildGraph() - * is decomposed into independently testable stages that communicate via PipelineContext. 
- */ -import path from 'node:path'; -import { performance } from 'node:perf_hooks'; -import { closeDb, getBuildMeta, initSchema, MIGRATIONS, openDb } from '../../../db/index.js'; -import { detectWorkspaces, loadConfig } from '../../../infrastructure/config.js'; -import { info } from '../../../infrastructure/logger.js'; -import { getActiveEngine } from '../../parser.js'; -import { setWorkspaces } from '../resolve.js'; -import { PipelineContext } from './context.js'; -import { loadPathAliases } from './helpers.js'; -import { buildEdges } from './stages/build-edges.js'; -import { buildStructure } from './stages/build-structure.js'; -// Pipeline stages -import { collectFiles } from './stages/collect-files.js'; -import { detectChanges } from './stages/detect-changes.js'; -import { finalize } from './stages/finalize.js'; -import { insertNodes } from './stages/insert-nodes.js'; -import { parseFiles } from './stages/parse-files.js'; -import { resolveImports } from './stages/resolve-imports.js'; -import { runAnalyses } from './stages/run-analyses.js'; - -// ── Setup helpers ─────────────────────────────────────────────────────── - -function initializeEngine(ctx) { - ctx.engineOpts = { - engine: ctx.opts.engine || 'auto', - dataflow: ctx.opts.dataflow !== false, - ast: ctx.opts.ast !== false, - }; - const { name: engineName, version: engineVersion } = getActiveEngine(ctx.engineOpts); - ctx.engineName = engineName; - ctx.engineVersion = engineVersion; - info(`Using ${engineName} engine${engineVersion ? 
` (v${engineVersion})` : ''}`); -} - -function checkEngineSchemaMismatch(ctx) { - ctx.schemaVersion = MIGRATIONS[MIGRATIONS.length - 1].version; - ctx.forceFullRebuild = false; - if (!ctx.incremental) return; - - const prevEngine = getBuildMeta(ctx.db, 'engine'); - if (prevEngine && prevEngine !== ctx.engineName) { - info(`Engine changed (${prevEngine} → ${ctx.engineName}), promoting to full rebuild.`); - ctx.forceFullRebuild = true; - } - const prevSchema = getBuildMeta(ctx.db, 'schema_version'); - if (prevSchema && Number(prevSchema) !== ctx.schemaVersion) { - info( - `Schema version changed (${prevSchema} → ${ctx.schemaVersion}), promoting to full rebuild.`, - ); - ctx.forceFullRebuild = true; - } -} - -function loadAliases(ctx) { - ctx.aliases = loadPathAliases(ctx.rootDir); - if (ctx.config.aliases) { - for (const [key, value] of Object.entries(ctx.config.aliases)) { - const pattern = key.endsWith('/') ? `${key}*` : key; - const target = path.resolve(ctx.rootDir, value); - ctx.aliases.paths[pattern] = [target.endsWith('/') ? 
`${target}*` : `${target}/*`]; - } - } - if (ctx.aliases.baseUrl || Object.keys(ctx.aliases.paths).length > 0) { - info( - `Loaded path aliases: baseUrl=${ctx.aliases.baseUrl || 'none'}, ${Object.keys(ctx.aliases.paths).length} path mappings`, - ); - } -} - -function setupPipeline(ctx) { - ctx.rootDir = path.resolve(ctx.rootDir); - ctx.dbPath = path.join(ctx.rootDir, '.codegraph', 'graph.db'); - ctx.db = openDb(ctx.dbPath); - initSchema(ctx.db); - - ctx.config = loadConfig(ctx.rootDir); - ctx.incremental = - ctx.opts.incremental !== false && ctx.config.build && ctx.config.build.incremental !== false; - - initializeEngine(ctx); - checkEngineSchemaMismatch(ctx); - loadAliases(ctx); - - // Workspace packages (monorepo) - const workspaces = detectWorkspaces(ctx.rootDir); - if (workspaces.size > 0) { - setWorkspaces(ctx.rootDir, workspaces); - info(`Detected ${workspaces.size} workspace packages`); - } - - ctx.timing.setupMs = performance.now() - ctx.buildStart; -} - -function formatTimingResult(ctx) { - return { - phases: { - setupMs: +ctx.timing.setupMs.toFixed(1), - parseMs: +(ctx.timing.parseMs ?? 0).toFixed(1), - insertMs: +(ctx.timing.insertMs ?? 0).toFixed(1), - resolveMs: +(ctx.timing.resolveMs ?? 0).toFixed(1), - edgesMs: +(ctx.timing.edgesMs ?? 0).toFixed(1), - structureMs: +(ctx.timing.structureMs ?? 0).toFixed(1), - rolesMs: +(ctx.timing.rolesMs ?? 0).toFixed(1), - astMs: +(ctx.timing.astMs ?? 0).toFixed(1), - complexityMs: +(ctx.timing.complexityMs ?? 0).toFixed(1), - ...(ctx.timing.cfgMs != null && { cfgMs: +ctx.timing.cfgMs.toFixed(1) }), - ...(ctx.timing.dataflowMs != null && { dataflowMs: +ctx.timing.dataflowMs.toFixed(1) }), - finalizeMs: +(ctx.timing.finalizeMs ?? 
0).toFixed(1), - }, - }; -} - -// ── Pipeline stages execution ─────────────────────────────────────────── - -async function runPipelineStages(ctx) { - await collectFiles(ctx); - await detectChanges(ctx); - - if (ctx.earlyExit) return; - - await parseFiles(ctx); - await insertNodes(ctx); - await resolveImports(ctx); - await buildEdges(ctx); - await buildStructure(ctx); - await runAnalyses(ctx); - await finalize(ctx); -} - -// ── Main entry point ──────────────────────────────────────────────────── - -/** - * Build the dependency graph for a codebase. - * - * Signature and return value are identical to the original monolithic buildGraph(). - * - * @param {string} rootDir - Root directory to scan - * @param {object} [opts] - Build options - * @returns {Promise<{ phases: object } | undefined>} - */ -export async function buildGraph(rootDir, opts = {}) { - const ctx = new PipelineContext(); - ctx.buildStart = performance.now(); - ctx.opts = opts; - ctx.rootDir = rootDir; - - try { - setupPipeline(ctx); - await runPipelineStages(ctx); - } catch (err) { - if (!ctx.earlyExit) closeDb(ctx.db); - throw err; - } - - if (ctx.earlyExit) return; - - return formatTimingResult(ctx); -} diff --git a/src/domain/graph/watcher.js b/src/domain/graph/watcher.js deleted file mode 100644 index 3fea7954..00000000 --- a/src/domain/graph/watcher.js +++ /dev/null @@ -1,165 +0,0 @@ -import fs from 'node:fs'; -import path from 'node:path'; -import { closeDb, getNodeId as getNodeIdQuery, initSchema, openDb } from '../../db/index.js'; -import { info } from '../../infrastructure/logger.js'; -import { EXTENSIONS, IGNORE_DIRS, normalizePath } from '../../shared/constants.js'; -import { DbError } from '../../shared/errors.js'; -import { createParseTreeCache, getActiveEngine } from '../parser.js'; -import { rebuildFile } from './builder/incremental.js'; -import { appendChangeEvents, buildChangeEvent, diffSymbols } from './change-journal.js'; -import { appendJournalEntries } from './journal.js'; - 
-function shouldIgnore(filePath) { - const parts = filePath.split(path.sep); - return parts.some((p) => IGNORE_DIRS.has(p)); -} - -function isTrackedExt(filePath) { - return EXTENSIONS.has(path.extname(filePath)); -} - -export async function watchProject(rootDir, opts = {}) { - const dbPath = path.join(rootDir, '.codegraph', 'graph.db'); - if (!fs.existsSync(dbPath)) { - throw new DbError('No graph.db found. Run `codegraph build` first.', { file: dbPath }); - } - - const db = openDb(dbPath); - initSchema(db); - const engineOpts = { engine: opts.engine || 'auto' }; - const { name: engineName, version: engineVersion } = getActiveEngine(engineOpts); - console.log( - `Watch mode using ${engineName} engine${engineVersion ? ` (v${engineVersion})` : ''}`, - ); - - const cache = createParseTreeCache(); - console.log( - cache - ? 'Incremental parsing enabled (native tree cache)' - : 'Incremental parsing unavailable (full re-parse)', - ); - - const stmts = { - insertNode: db.prepare( - 'INSERT OR IGNORE INTO nodes (name, kind, file, line, end_line) VALUES (?, ?, ?, ?, ?)', - ), - getNodeId: { - get: (name, kind, file, line) => { - const id = getNodeIdQuery(db, name, kind, file, line); - return id != null ? { id } : undefined; - }, - }, - insertEdge: db.prepare( - 'INSERT INTO edges (source_id, target_id, kind, confidence, dynamic) VALUES (?, ?, ?, ?, ?)', - ), - deleteNodes: db.prepare('DELETE FROM nodes WHERE file = ?'), - deleteEdgesForFile: null, - countNodes: db.prepare('SELECT COUNT(*) as c FROM nodes WHERE file = ?'), - countEdgesForFile: null, - findNodeInFile: db.prepare( - "SELECT id, file FROM nodes WHERE name = ? AND kind IN ('function', 'method', 'class', 'interface', 'type', 'struct', 'enum', 'trait', 'record', 'module', 'constant') AND file = ?", - ), - findNodeByName: db.prepare( - "SELECT id, file FROM nodes WHERE name = ? 
AND kind IN ('function', 'method', 'class', 'interface', 'type', 'struct', 'enum', 'trait', 'record', 'module', 'constant')", - ), - listSymbols: db.prepare("SELECT name, kind, line FROM nodes WHERE file = ? AND kind != 'file'"), - }; - - // Use named params for statements needing the same value twice - const origDeleteEdges = db.prepare( - `DELETE FROM edges WHERE source_id IN (SELECT id FROM nodes WHERE file = @f) OR target_id IN (SELECT id FROM nodes WHERE file = @f)`, - ); - const origCountEdges = db.prepare( - `SELECT COUNT(*) as c FROM edges WHERE source_id IN (SELECT id FROM nodes WHERE file = @f) OR target_id IN (SELECT id FROM nodes WHERE file = @f)`, - ); - stmts.deleteEdgesForFile = { run: (f) => origDeleteEdges.run({ f }) }; - stmts.countEdgesForFile = { get: (f) => origCountEdges.get({ f }) }; - - const pending = new Set(); - let timer = null; - const DEBOUNCE_MS = 300; - - async function processPending() { - const files = [...pending]; - pending.clear(); - - const results = []; - for (const filePath of files) { - const result = await rebuildFile(db, rootDir, filePath, stmts, engineOpts, cache, { - diffSymbols, - }); - if (result) results.push(result); - } - const updates = results; - - // Append processed files to journal for Tier 0 detection on next build - if (updates.length > 0) { - const entries = updates.map((r) => ({ - file: r.file, - deleted: r.deleted || false, - })); - try { - appendJournalEntries(rootDir, entries); - } catch { - /* journal write failure is non-fatal */ - } - - const changeEvents = updates.map((r) => - buildChangeEvent(r.file, r.event, r.symbolDiff, { - nodesBefore: r.nodesBefore, - nodesAfter: r.nodesAfter, - edgesAdded: r.edgesAdded, - }), - ); - try { - appendChangeEvents(rootDir, changeEvents); - } catch { - /* change event write failure is non-fatal */ - } - } - - for (const r of updates) { - const nodeDelta = r.nodesAdded - r.nodesRemoved; - const nodeStr = nodeDelta >= 0 ? 
`+${nodeDelta}` : `${nodeDelta}`; - if (r.deleted) { - info(`Removed: ${r.file} (-${r.nodesRemoved} nodes)`); - } else { - info(`Updated: ${r.file} (${nodeStr} nodes, +${r.edgesAdded} edges)`); - } - } - } - - console.log(`Watching ${rootDir} for changes...`); - console.log('Press Ctrl+C to stop.\n'); - - const watcher = fs.watch(rootDir, { recursive: true }, (_eventType, filename) => { - if (!filename) return; - if (shouldIgnore(filename)) return; - if (!isTrackedExt(filename)) return; - - const fullPath = path.join(rootDir, filename); - pending.add(fullPath); - - if (timer) clearTimeout(timer); - timer = setTimeout(processPending, DEBOUNCE_MS); - }); - - process.on('SIGINT', () => { - console.log('\nStopping watcher...'); - watcher.close(); - // Flush any pending file paths to journal before exit - if (pending.size > 0) { - const entries = [...pending].map((filePath) => ({ - file: normalizePath(path.relative(rootDir, filePath)), - })); - try { - appendJournalEntries(rootDir, entries); - } catch { - /* best-effort */ - } - } - if (cache) cache.clear(); - closeDb(db); - process.exit(0); - }); -} diff --git a/src/domain/search/generator.js b/src/domain/search/generator.js deleted file mode 100644 index dcdfd36c..00000000 --- a/src/domain/search/generator.js +++ /dev/null @@ -1,163 +0,0 @@ -import fs from 'node:fs'; -import path from 'node:path'; -import { closeDb, findDbPath, openDb } from '../../db/index.js'; -import { warn } from '../../infrastructure/logger.js'; -import { DbError } from '../../shared/errors.js'; -import { embed, getModelConfig } from './models.js'; -import { buildSourceText } from './strategies/source.js'; -import { buildStructuredText } from './strategies/structured.js'; - -/** - * Rough token estimate (~4 chars per token for code/English). - * Conservative — avoids adding a tokenizer dependency. 
- */ -export function estimateTokens(text) { - return Math.ceil(text.length / 4); -} - -function initEmbeddingsSchema(db) { - db.exec(` - CREATE TABLE IF NOT EXISTS embeddings ( - node_id INTEGER PRIMARY KEY, - vector BLOB NOT NULL, - text_preview TEXT, - FOREIGN KEY(node_id) REFERENCES nodes(id) - ); - CREATE TABLE IF NOT EXISTS embedding_meta ( - key TEXT PRIMARY KEY, - value TEXT - ); - `); - - // Add full_text column (idempotent — ignore if already exists) - try { - db.exec('ALTER TABLE embeddings ADD COLUMN full_text TEXT'); - } catch { - /* column already exists */ - } - - // FTS5 virtual table for BM25 keyword search - db.exec(` - CREATE VIRTUAL TABLE IF NOT EXISTS fts_index USING fts5( - name, - content, - tokenize='unicode61' - ); - `); -} - -/** - * Build embeddings for all functions/methods/classes in the graph. - * @param {string} rootDir - Project root directory - * @param {string} modelKey - Model identifier from MODELS registry - * @param {string} [customDbPath] - Override path to graph.db - * @param {object} [options] - Embedding options - * @param {string} [options.strategy='structured'] - 'structured' (graph-enriched) or 'source' (raw code) - */ -export async function buildEmbeddings(rootDir, modelKey, customDbPath, options = {}) { - const strategy = options.strategy || 'structured'; - const dbPath = customDbPath || findDbPath(null); - - if (!fs.existsSync(dbPath)) { - throw new DbError( - `No codegraph database found at ${dbPath}.\nRun "codegraph build" first to analyze your codebase.`, - { file: dbPath }, - ); - } - - const db = openDb(dbPath); - initEmbeddingsSchema(db); - - db.exec('DELETE FROM embeddings'); - db.exec('DELETE FROM embedding_meta'); - db.exec('DELETE FROM fts_index'); - - const nodes = db - .prepare( - `SELECT * FROM nodes WHERE kind IN ('function', 'method', 'class') ORDER BY file, line`, - ) - .all(); - - console.log(`Building embeddings for ${nodes.length} symbols (strategy: ${strategy})...`); - - const byFile = new Map(); - 
for (const node of nodes) { - if (!byFile.has(node.file)) byFile.set(node.file, []); - byFile.get(node.file).push(node); - } - - const texts = []; - const nodeIds = []; - const nodeNames = []; - const previews = []; - const config = getModelConfig(modelKey); - const contextWindow = config.contextWindow; - let overflowCount = 0; - - for (const [file, fileNodes] of byFile) { - const fullPath = path.join(rootDir, file); - let lines; - try { - lines = fs.readFileSync(fullPath, 'utf-8').split('\n'); - } catch (err) { - warn(`Cannot read ${file} for embeddings: ${err.message}`); - continue; - } - - for (const node of fileNodes) { - let text = - strategy === 'structured' - ? buildStructuredText(node, file, lines, db) - : buildSourceText(node, file, lines); - - // Detect and handle context window overflow - const tokens = estimateTokens(text); - if (tokens > contextWindow) { - overflowCount++; - const maxChars = contextWindow * 4; - text = text.slice(0, maxChars); - } - - texts.push(text); - nodeIds.push(node.id); - nodeNames.push(node.name); - previews.push(`${node.name} (${node.kind}) -- ${file}:${node.line}`); - } - } - - if (overflowCount > 0) { - warn( - `${overflowCount} symbol(s) exceeded model context window (${contextWindow} tokens) and were truncated`, - ); - } - - console.log(`Embedding ${texts.length} symbols...`); - const { vectors, dim } = await embed(texts, modelKey); - - const insert = db.prepare( - 'INSERT OR REPLACE INTO embeddings (node_id, vector, text_preview, full_text) VALUES (?, ?, ?, ?)', - ); - const insertFts = db.prepare('INSERT INTO fts_index(rowid, name, content) VALUES (?, ?, ?)'); - const insertMeta = db.prepare('INSERT OR REPLACE INTO embedding_meta (key, value) VALUES (?, ?)'); - const insertAll = db.transaction(() => { - for (let i = 0; i < vectors.length; i++) { - insert.run(nodeIds[i], Buffer.from(vectors[i].buffer), previews[i], texts[i]); - insertFts.run(nodeIds[i], nodeNames[i], texts[i]); - } - insertMeta.run('model', config.name); 
- insertMeta.run('dim', String(dim)); - insertMeta.run('count', String(vectors.length)); - insertMeta.run('fts_count', String(vectors.length)); - insertMeta.run('strategy', strategy); - insertMeta.run('built_at', new Date().toISOString()); - if (overflowCount > 0) { - insertMeta.run('truncated_count', String(overflowCount)); - } - }); - insertAll(); - - console.log( - `\nStored ${vectors.length} embeddings (${dim}d, ${config.name}, strategy: ${strategy}) in graph.db`, - ); - closeDb(db); -} diff --git a/src/domain/search/index.js b/src/domain/search/index.js deleted file mode 100644 index bac3c60d..00000000 --- a/src/domain/search/index.js +++ /dev/null @@ -1,13 +0,0 @@ -/** - * Embeddings subsystem — public API barrel. - * - * Re-exports everything consumers previously imported from `../embedder.js`. - */ - -export { buildEmbeddings, estimateTokens } from './generator.js'; -export { DEFAULT_MODEL, disposeModel, EMBEDDING_STRATEGIES, embed, MODELS } from './models.js'; -export { search } from './search/cli-formatter.js'; -export { hybridSearchData } from './search/hybrid.js'; -export { ftsSearchData } from './search/keyword.js'; -export { multiSearchData, searchData } from './search/semantic.js'; -export { cosineSim } from './stores/sqlite-blob.js'; diff --git a/src/domain/search/models.js b/src/domain/search/models.js deleted file mode 100644 index 404952cc..00000000 --- a/src/domain/search/models.js +++ /dev/null @@ -1,218 +0,0 @@ -import { execFileSync } from 'node:child_process'; -import { createInterface } from 'node:readline'; -import { info } from '../../infrastructure/logger.js'; -import { ConfigError, EngineError } from '../../shared/errors.js'; - -// Lazy-load transformers (heavy, optional module) -let pipeline = null; -let extractor = null; -let activeModel = null; - -export const MODELS = { - minilm: { - name: 'Xenova/all-MiniLM-L6-v2', - dim: 384, - contextWindow: 256, - desc: 'Smallest, fastest (~23MB). 
General text.', - quantized: true, - }, - 'jina-small': { - name: 'Xenova/jina-embeddings-v2-small-en', - dim: 512, - contextWindow: 8192, - desc: 'Small, good quality (~33MB). General text.', - quantized: false, - }, - 'jina-base': { - name: 'Xenova/jina-embeddings-v2-base-en', - dim: 768, - contextWindow: 8192, - desc: 'Good quality (~137MB). General text, 8192 token context.', - quantized: false, - }, - 'jina-code': { - name: 'Xenova/jina-embeddings-v2-base-code', - dim: 768, - contextWindow: 8192, - desc: 'Code-aware (~137MB). Trained on code+text, best for code search.', - quantized: false, - }, - nomic: { - name: 'Xenova/nomic-embed-text-v1', - dim: 768, - contextWindow: 8192, - desc: 'Good local quality (~137MB). 8192 context.', - quantized: false, - }, - 'nomic-v1.5': { - name: 'nomic-ai/nomic-embed-text-v1.5', - dim: 768, - contextWindow: 8192, - desc: 'Improved nomic (~137MB). Matryoshka dimensions, 8192 context.', - quantized: false, - }, - 'bge-large': { - name: 'Xenova/bge-large-en-v1.5', - dim: 1024, - contextWindow: 512, - desc: 'Best general retrieval (~335MB). Top MTEB scores.', - quantized: false, - }, -}; - -export const EMBEDDING_STRATEGIES = ['structured', 'source']; - -export const DEFAULT_MODEL = 'nomic-v1.5'; -const BATCH_SIZE_MAP = { - minilm: 32, - 'jina-small': 16, - 'jina-base': 8, - 'jina-code': 8, - nomic: 8, - 'nomic-v1.5': 8, - 'bge-large': 4, -}; -const DEFAULT_BATCH_SIZE = 32; - -/** @internal Used by generator.js — not part of the public barrel. */ -export function getModelConfig(modelKey) { - const key = modelKey || DEFAULT_MODEL; - const config = MODELS[key]; - if (!config) { - throw new ConfigError(`Unknown model: ${key}. Available: ${Object.keys(MODELS).join(', ')}`); - } - return config; -} - -/** - * Prompt the user to install a missing package interactively. - * Returns true if the package was installed, false otherwise. - * Skips the prompt entirely in non-TTY environments (CI, piped stdin). 
- * @internal Not part of the public barrel. - */ -export function promptInstall(packageName) { - if (!process.stdin.isTTY) return Promise.resolve(false); - - return new Promise((resolve) => { - const rl = createInterface({ input: process.stdin, output: process.stderr }); - rl.question(`Semantic search requires ${packageName}. Install it now? [y/N] `, (answer) => { - rl.close(); - if (answer.trim().toLowerCase() !== 'y') return resolve(false); - try { - execFileSync('npm', ['install', packageName], { - stdio: 'inherit', - timeout: 300_000, - }); - resolve(true); - } catch { - resolve(false); - } - }); - }); -} - -/** - * Lazy-load @huggingface/transformers. - * If the package is missing, prompts the user to install it interactively. - * In non-TTY environments, prints an error and exits. - * @internal Not part of the public barrel. - */ -export async function loadTransformers() { - try { - return await import('@huggingface/transformers'); - } catch { - const pkg = '@huggingface/transformers'; - const installed = await promptInstall(pkg); - if (installed) { - try { - return await import(pkg); - } catch (loadErr) { - throw new EngineError( - `${pkg} was installed but failed to load. Please check your environment.`, - { cause: loadErr }, - ); - } - } - throw new EngineError(`Semantic search requires ${pkg}.\nInstall it with: npm install ${pkg}`); - } -} - -/** - * Dispose the current ONNX session and free memory. - * Safe to call when no model is loaded (no-op). 
- */ -export async function disposeModel() { - if (extractor) { - await extractor.dispose(); - extractor = null; - } - activeModel = null; -} - -async function loadModel(modelKey) { - const config = getModelConfig(modelKey); - - if (extractor && activeModel === config.name) return { extractor, config }; - - // Dispose previous model before loading a different one - await disposeModel(); - - const transformers = await loadTransformers(); - pipeline = transformers.pipeline; - - info(`Loading embedding model: ${config.name} (${config.dim}d)...`); - const pipelineOpts = config.quantized ? { quantized: true } : {}; - try { - extractor = await pipeline('feature-extraction', config.name, pipelineOpts); - } catch (err) { - const msg = err.message || String(err); - if (msg.includes('Unauthorized') || msg.includes('401') || msg.includes('gated')) { - throw new EngineError( - `Model "${config.name}" requires authentication.\n` + - `This model is gated on HuggingFace and needs an access token.\n\n` + - `Options:\n` + - ` 1. Set HF_TOKEN env var: export HF_TOKEN=hf_...\n` + - ` 2. Use a public model instead: codegraph embed --model minilm`, - { cause: err }, - ); - } - throw new EngineError( - `Failed to load model "${config.name}": ${msg}\n` + - `Try a different model: codegraph embed --model minilm`, - { cause: err }, - ); - } - activeModel = config.name; - info('Model loaded.'); - return { extractor, config }; -} - -/** - * Generate embeddings for an array of texts. 
- */ -export async function embed(texts, modelKey) { - const { extractor: ext, config } = await loadModel(modelKey); - const dim = config.dim; - const results = []; - const batchSize = BATCH_SIZE_MAP[modelKey || DEFAULT_MODEL] || DEFAULT_BATCH_SIZE; - - for (let i = 0; i < texts.length; i += batchSize) { - const batch = texts.slice(i, i + batchSize); - const output = await ext(batch, { pooling: 'mean', normalize: true }); - - for (let j = 0; j < batch.length; j++) { - const start = j * dim; - const vec = new Float32Array(dim); - for (let k = 0; k < dim; k++) { - vec[k] = output.data[start + k]; - } - results.push(vec); - } - - if (texts.length > batchSize) { - process.stdout.write(` Embedded ${Math.min(i + batchSize, texts.length)}/${texts.length}\r`); - } - } - - return { vectors: results, dim }; -} diff --git a/src/domain/search/search/cli-formatter.ts b/src/domain/search/search/cli-formatter.ts index 1081afc7..70a5afa8 100644 --- a/src/domain/search/search/cli-formatter.ts +++ b/src/domain/search/search/cli-formatter.ts @@ -7,6 +7,8 @@ import { multiSearchData, searchData } from './semantic.js'; interface SearchOpts extends SemanticSearchOpts { mode?: 'hybrid' | 'semantic' | 'keyword'; json?: boolean; + ndjson?: boolean; + offset?: number; } export async function search( diff --git a/src/features/boundaries.ts b/src/features/boundaries.ts index 672ba55b..2580b90d 100644 --- a/src/features/boundaries.ts +++ b/src/features/boundaries.ts @@ -245,7 +245,7 @@ function classifyFile(filePath: string, modules: Map): s return null; } -interface BoundaryViolation { +export interface BoundaryViolation { rule: string; name: string; file: string; diff --git a/src/features/export.js b/src/features/export.js deleted file mode 100644 index 3bd064e3..00000000 --- a/src/features/export.js +++ /dev/null @@ -1,378 +0,0 @@ -import path from 'node:path'; -import { isTestFile } from '../infrastructure/test-filter.js'; -import { - renderFileLevelDOT, - renderFileLevelGraphML, - 
renderFileLevelMermaid, - renderFileLevelNeo4jCSV, - renderFunctionLevelDOT, - renderFunctionLevelGraphML, - renderFunctionLevelMermaid, - renderFunctionLevelNeo4jCSV, -} from '../presentation/export.js'; -import { paginateResult } from '../shared/paginate.js'; - -const DEFAULT_MIN_CONFIDENCE = 0.5; - -// ─── Shared data loaders ───────────────────────────────────────────── - -/** - * Load file-level edges from DB with filtering. - * @param {object} db - * @param {object} opts - * @param {boolean} [opts.includeKind] - Include edge_kind in SELECT DISTINCT - * @param {boolean} [opts.includeConfidence] - Include confidence (adds a column to DISTINCT — use only when needed) - * @returns {{ edges: Array, totalEdges: number }} - */ -function loadFileLevelEdges( - db, - { noTests, minConfidence, limit, includeKind = false, includeConfidence = false }, -) { - const minConf = minConfidence ?? DEFAULT_MIN_CONFIDENCE; - const kindClause = includeKind ? ', e.kind AS edge_kind' : ''; - const confidenceClause = includeConfidence ? ', e.confidence' : ''; - let edges = db - .prepare( - ` - SELECT DISTINCT n1.file AS source, n2.file AS target${kindClause}${confidenceClause} - FROM edges e - JOIN nodes n1 ON e.source_id = n1.id - JOIN nodes n2 ON e.target_id = n2.id - WHERE n1.file != n2.file AND e.kind IN ('imports', 'imports-type', 'calls') - AND e.confidence >= ? - `, - ) - .all(minConf); - if (noTests) edges = edges.filter((e) => !isTestFile(e.source) && !isTestFile(e.target)); - const totalEdges = edges.length; - if (limit && edges.length > limit) edges = edges.slice(0, limit); - return { edges, totalEdges }; -} - -/** - * Load function-level edges from DB with filtering. - * Returns the maximal field set needed by any serializer. - * @returns {{ edges: Array, totalEdges: number }} - */ -function loadFunctionLevelEdges(db, { noTests, minConfidence, limit }) { - const minConf = minConfidence ?? 
DEFAULT_MIN_CONFIDENCE; - let edges = db - .prepare( - ` - SELECT n1.id AS source_id, n1.name AS source_name, n1.kind AS source_kind, - n1.file AS source_file, n1.line AS source_line, n1.role AS source_role, - n2.id AS target_id, n2.name AS target_name, n2.kind AS target_kind, - n2.file AS target_file, n2.line AS target_line, n2.role AS target_role, - e.kind AS edge_kind, e.confidence - FROM edges e - JOIN nodes n1 ON e.source_id = n1.id - JOIN nodes n2 ON e.target_id = n2.id - WHERE n1.kind IN ('function', 'method', 'class', 'interface', 'type', 'struct', 'enum', 'trait', 'record', 'module', 'constant') - AND n2.kind IN ('function', 'method', 'class', 'interface', 'type', 'struct', 'enum', 'trait', 'record', 'module', 'constant') - AND e.kind = 'calls' - AND e.confidence >= ? - `, - ) - .all(minConf); - if (noTests) - edges = edges.filter((e) => !isTestFile(e.source_file) && !isTestFile(e.target_file)); - const totalEdges = edges.length; - if (limit && edges.length > limit) edges = edges.slice(0, limit); - return { edges, totalEdges }; -} - -/** - * Load directory groupings for file-level graphs. - * Uses DB directory nodes if available, falls back to path.dirname(). - * @returns {Array<{ name: string, files: Array<{ path: string, basename: string }>, cohesion: number|null }>} - */ -function loadDirectoryGroups(db, allFiles) { - const hasDirectoryNodes = - db.prepare("SELECT COUNT(*) as c FROM nodes WHERE kind = 'directory'").get().c > 0; - - const dirs = new Map(); - - if (hasDirectoryNodes) { - const dbDirs = db - .prepare(` - SELECT n.id, n.name, nm.cohesion - FROM nodes n - LEFT JOIN node_metrics nm ON n.id = nm.node_id - WHERE n.kind = 'directory' - `) - .all(); - - for (const d of dbDirs) { - const containedFiles = db - .prepare(` - SELECT n.name FROM edges e - JOIN nodes n ON e.target_id = n.id - WHERE e.source_id = ? 
AND e.kind = 'contains' AND n.kind = 'file' - `) - .all(d.id) - .map((r) => r.name) - .filter((f) => allFiles.has(f)); - - if (containedFiles.length > 0) { - dirs.set(d.name, { files: containedFiles, cohesion: d.cohesion ?? null }); - } - } - } else { - for (const file of allFiles) { - const dir = path.dirname(file) || '.'; - if (!dirs.has(dir)) dirs.set(dir, { files: [], cohesion: null }); - dirs.get(dir).files.push(file); - } - } - - return [...dirs] - .sort((a, b) => a[0].localeCompare(b[0])) - .map(([name, info]) => ({ - name, - files: info.files.map((f) => ({ path: f, basename: path.basename(f) })), - cohesion: info.cohesion, - })); -} - -/** - * Load directory groupings for Mermaid file-level graphs (simplified — no cohesion, string arrays). - */ -function loadMermaidDirectoryGroups(db, allFiles) { - const hasDirectoryNodes = - db.prepare("SELECT COUNT(*) as c FROM nodes WHERE kind = 'directory'").get().c > 0; - - const dirs = new Map(); - - if (hasDirectoryNodes) { - const dbDirs = db.prepare("SELECT id, name FROM nodes WHERE kind = 'directory'").all(); - for (const d of dbDirs) { - const containedFiles = db - .prepare(` - SELECT n.name FROM edges e - JOIN nodes n ON e.target_id = n.id - WHERE e.source_id = ? AND e.kind = 'contains' AND n.kind = 'file' - `) - .all(d.id) - .map((r) => r.name) - .filter((f) => allFiles.has(f)); - if (containedFiles.length > 0) dirs.set(d.name, containedFiles); - } - } else { - for (const file of allFiles) { - const dir = path.dirname(file) || '.'; - if (!dirs.has(dir)) dirs.set(dir, []); - dirs.get(dir).push(file); - } - } - - return [...dirs] - .sort((a, b) => a[0].localeCompare(b[0])) - .map(([name, files]) => ({ name, files })); -} - -/** - * Load node roles for Mermaid function-level styling. 
- * @returns {Map} "file::name" → role - */ -function loadNodeRoles(db, edges) { - const roles = new Map(); - const seen = new Set(); - for (const e of edges) { - for (const [file, name] of [ - [e.source_file, e.source_name], - [e.target_file, e.target_name], - ]) { - const key = `${file}::${name}`; - if (seen.has(key)) continue; - seen.add(key); - const row = db - .prepare('SELECT role FROM nodes WHERE file = ? AND name = ? AND role IS NOT NULL LIMIT 1') - .get(file, name); - if (row?.role) roles.set(key, row.role); - } - } - return roles; -} - -// ─── Public API ────────────────────────────────────────────────────── - -/** - * Export the dependency graph in DOT (Graphviz) format. - */ -export function exportDOT(db, opts = {}) { - const fileLevel = opts.fileLevel !== false; - const noTests = opts.noTests || false; - const minConfidence = opts.minConfidence; - const limit = opts.limit; - - if (fileLevel) { - const { edges, totalEdges } = loadFileLevelEdges(db, { noTests, minConfidence, limit }); - const allFiles = new Set(); - for (const { source, target } of edges) { - allFiles.add(source); - allFiles.add(target); - } - const dirs = loadDirectoryGroups(db, allFiles); - return renderFileLevelDOT({ dirs, edges, totalEdges, limit }); - } - - const { edges, totalEdges } = loadFunctionLevelEdges(db, { noTests, minConfidence, limit }); - return renderFunctionLevelDOT({ edges, totalEdges, limit }); -} - -/** - * Export the dependency graph in Mermaid format. 
- */ -export function exportMermaid(db, opts = {}) { - const fileLevel = opts.fileLevel !== false; - const noTests = opts.noTests || false; - const minConfidence = opts.minConfidence; - const direction = opts.direction || 'LR'; - const limit = opts.limit; - - if (fileLevel) { - const { edges, totalEdges } = loadFileLevelEdges(db, { - noTests, - minConfidence, - limit, - includeKind: true, - }); - const allFiles = new Set(); - for (const { source, target } of edges) { - allFiles.add(source); - allFiles.add(target); - } - const dirs = loadMermaidDirectoryGroups(db, allFiles); - return renderFileLevelMermaid({ direction, dirs, edges, totalEdges, limit }); - } - - const { edges, totalEdges } = loadFunctionLevelEdges(db, { noTests, minConfidence, limit }); - const roles = loadNodeRoles(db, edges); - return renderFunctionLevelMermaid({ direction, edges, roles, totalEdges, limit }); -} - -/** - * Export as JSON adjacency list. - */ -export function exportJSON(db, opts = {}) { - const noTests = opts.noTests || false; - const minConf = opts.minConfidence ?? DEFAULT_MIN_CONFIDENCE; - - let nodes = db - .prepare(` - SELECT id, name, kind, file, line FROM nodes WHERE kind = 'file' - `) - .all(); - if (noTests) nodes = nodes.filter((n) => !isTestFile(n.file)); - - let edges = db - .prepare(` - SELECT DISTINCT n1.file AS source, n2.file AS target, e.kind, e.confidence - FROM edges e - JOIN nodes n1 ON e.source_id = n1.id - JOIN nodes n2 ON e.target_id = n2.id - WHERE n1.file != n2.file AND e.confidence >= ? - `) - .all(minConf); - if (noTests) edges = edges.filter((e) => !isTestFile(e.source) && !isTestFile(e.target)); - - const base = { nodes, edges }; - return paginateResult(base, 'edges', { limit: opts.limit, offset: opts.offset }); -} - -/** - * Export the dependency graph in GraphML (XML) format. 
- */ -export function exportGraphML(db, opts = {}) { - const fileLevel = opts.fileLevel !== false; - const noTests = opts.noTests || false; - const minConfidence = opts.minConfidence; - const limit = opts.limit; - - if (fileLevel) { - const { edges } = loadFileLevelEdges(db, { noTests, minConfidence, limit }); - return renderFileLevelGraphML({ edges }); - } - - const { edges } = loadFunctionLevelEdges(db, { noTests, minConfidence, limit }); - return renderFunctionLevelGraphML({ edges }); -} - -/** - * Export the dependency graph in TinkerPop GraphSON v3 format. - */ -export function exportGraphSON(db, opts = {}) { - const noTests = opts.noTests || false; - const minConf = opts.minConfidence ?? DEFAULT_MIN_CONFIDENCE; - - let nodes = db - .prepare(` - SELECT id, name, kind, file, line, role FROM nodes - WHERE kind IN ('function', 'method', 'class', 'interface', 'type', 'struct', 'enum', 'trait', 'record', 'module', 'constant', 'file') - `) - .all(); - if (noTests) nodes = nodes.filter((n) => !isTestFile(n.file)); - - let edges = db - .prepare(` - SELECT e.rowid AS id, n1.id AS outV, n2.id AS inV, e.kind, e.confidence - FROM edges e - JOIN nodes n1 ON e.source_id = n1.id - JOIN nodes n2 ON e.target_id = n2.id - WHERE e.confidence >= ? - `) - .all(minConf); - if (noTests) { - const nodeIds = new Set(nodes.map((n) => n.id)); - edges = edges.filter((e) => nodeIds.has(e.outV) && nodeIds.has(e.inV)); - } - - const vertices = nodes.map((n) => ({ - id: n.id, - label: n.kind, - properties: { - name: [{ id: 0, value: n.name }], - file: [{ id: 0, value: n.file }], - ...(n.line != null ? { line: [{ id: 0, value: n.line }] } : {}), - ...(n.role ? 
{ role: [{ id: 0, value: n.role }] } : {}), - }, - })); - - const gEdges = edges.map((e) => ({ - id: e.id, - label: e.kind, - inV: e.inV, - outV: e.outV, - properties: { - confidence: e.confidence, - }, - })); - - const base = { vertices, edges: gEdges }; - return paginateResult(base, 'edges', { limit: opts.limit, offset: opts.offset }); -} - -/** - * Export the dependency graph as Neo4j bulk-import CSV files. - * Returns { nodes: string, relationships: string }. - */ -export function exportNeo4jCSV(db, opts = {}) { - const fileLevel = opts.fileLevel !== false; - const noTests = opts.noTests || false; - const minConfidence = opts.minConfidence; - const limit = opts.limit; - - if (fileLevel) { - const { edges } = loadFileLevelEdges(db, { - noTests, - minConfidence, - limit, - includeKind: true, - includeConfidence: true, - }); - return renderFileLevelNeo4jCSV({ edges }); - } - - const { edges } = loadFunctionLevelEdges(db, { noTests, minConfidence, limit }); - return renderFunctionLevelNeo4jCSV({ edges }); -} diff --git a/src/index.ts b/src/index.ts new file mode 100644 index 00000000..99bb4aea --- /dev/null +++ b/src/index.ts @@ -0,0 +1,67 @@ +/** + * codegraph — Programmatic API + * + * Curated public surface: *Data() query functions, graph building, + * export formats, and essential constants. CLI formatters and internal + * utilities are not exported — import them directly if needed. 
+ * + * Usage: + * import { buildGraph, queryNameData, findCycles, exportDOT } from '@optave/codegraph'; + */ + +export { buildGraph } from './domain/graph/builder.js'; +export { findCycles } from './domain/graph/cycles.js'; +export { + briefData, + childrenData, + contextData, + diffImpactData, + explainData, + exportsData, + fileDepsData, + fnDepsData, + fnImpactData, + impactAnalysisData, + moduleMapData, + pathData, + queryNameData, + rolesData, + statsData, + whereData, +} from './domain/queries.js'; +export { + buildEmbeddings, + hybridSearchData, + multiSearchData, + searchData, +} from './domain/search/index.js'; +export { astQueryData } from './features/ast.js'; +export { auditData } from './features/audit.js'; +export { batchData } from './features/batch.js'; +export { branchCompareData } from './features/branch-compare.js'; +export { cfgData } from './features/cfg.js'; +export { checkData } from './features/check.js'; +export { coChangeData } from './features/cochange.js'; +export { communitiesData } from './features/communities.js'; +export { complexityData } from './features/complexity.js'; +export { dataflowData } from './features/dataflow.js'; +export { exportDOT, exportJSON, exportMermaid } from './features/export.js'; +export { flowData, listEntryPointsData } from './features/flow.js'; +export { manifestoData } from './features/manifesto.js'; +export { ownersData } from './features/owners.js'; +export { sequenceData } from './features/sequence.js'; +export { hotspotsData, moduleBoundariesData, structureData } from './features/structure.js'; +export { triageData } from './features/triage.js'; +export { loadConfig } from './infrastructure/config.js'; +export { EXTENSIONS, IGNORE_DIRS } from './shared/constants.js'; +export { + AnalysisError, + BoundaryError, + CodegraphError, + ConfigError, + DbError, + EngineError, + ParseError, + ResolutionError, +} from './shared/errors.js'; +export { EVERY_EDGE_KIND, EVERY_SYMBOL_KIND } from './shared/kinds.js'; 
diff --git a/src/mcp/index.js b/src/mcp/index.js deleted file mode 100644 index 551c65b8..00000000 --- a/src/mcp/index.js +++ /dev/null @@ -1,2 +0,0 @@ -export { startMCPServer } from './server.js'; -export { buildToolList, TOOLS } from './tool-registry.js'; diff --git a/src/mcp/middleware.js b/src/mcp/middleware.js deleted file mode 100644 index 01bb8ed2..00000000 --- a/src/mcp/middleware.js +++ /dev/null @@ -1,44 +0,0 @@ -/** - * MCP middleware helpers — pagination defaults and limits. - */ - -import { getMcpDefaults, MCP_DEFAULTS, MCP_MAX_LIMIT } from '../shared/paginate.js'; - -export { MCP_DEFAULTS, MCP_MAX_LIMIT }; - -/** Resolved MCP defaults (may include config overrides). Set via initMcpDefaults(). */ -let resolvedDefaults = MCP_DEFAULTS; - -/** - * Initialize MCP defaults from config. Call once at server startup. - * @param {object} [configMcpDefaults] - config.mcp.defaults overrides - */ -export function initMcpDefaults(configMcpDefaults) { - resolvedDefaults = getMcpDefaults(configMcpDefaults); -} - -/** - * Reset MCP defaults back to the base defaults. Useful for test isolation. - */ -export function resetMcpDefaults() { - resolvedDefaults = MCP_DEFAULTS; -} - -/** - * Resolve effective limit for a tool call. - * @param {object} args - Tool arguments - * @param {string} toolName - Tool name (for default lookup) - * @returns {number} - */ -export function effectiveLimit(args, toolName) { - return Math.min(args.limit ?? resolvedDefaults[toolName] ?? 100, MCP_MAX_LIMIT); -} - -/** - * Resolve effective offset for a tool call. - * @param {object} args - Tool arguments - * @returns {number} - */ -export function effectiveOffset(args) { - return args.offset ?? 0; -} diff --git a/src/mcp/server.js b/src/mcp/server.js deleted file mode 100644 index db5d3eb8..00000000 --- a/src/mcp/server.js +++ /dev/null @@ -1,143 +0,0 @@ -/** - * MCP (Model Context Protocol) server for codegraph. - * Exposes codegraph queries as tools that AI coding assistants can call. 
- * - * Requires: npm install @modelcontextprotocol/sdk - */ - -import { createRequire } from 'node:module'; -import { findDbPath } from '../db/index.js'; -import { loadConfig } from '../infrastructure/config.js'; -import { CodegraphError, ConfigError } from '../shared/errors.js'; -import { MCP_MAX_LIMIT } from '../shared/paginate.js'; -import { initMcpDefaults } from './middleware.js'; -import { buildToolList } from './tool-registry.js'; -import { TOOL_HANDLERS } from './tools/index.js'; - -/** - * Start the MCP server. - * This function requires @modelcontextprotocol/sdk to be installed. - * - * @param {string} [customDbPath] - Path to a specific graph.db - * @param {object} [options] - * @param {boolean} [options.multiRepo] - Enable multi-repo access (default: false) - * @param {string[]} [options.allowedRepos] - Restrict access to these repo names only - */ -async function loadMCPSdk() { - try { - const sdk = await import('@modelcontextprotocol/sdk/server/index.js'); - const transport = await import('@modelcontextprotocol/sdk/server/stdio.js'); - const types = await import('@modelcontextprotocol/sdk/types.js'); - return { - Server: sdk.Server, - StdioServerTransport: transport.StdioServerTransport, - ListToolsRequestSchema: types.ListToolsRequestSchema, - CallToolRequestSchema: types.CallToolRequestSchema, - }; - } catch { - throw new ConfigError( - 'MCP server requires @modelcontextprotocol/sdk.\nInstall it with: npm install @modelcontextprotocol/sdk', - ); - } -} - -function createLazyLoaders() { - let _queries; - let _Database; - return { - async getQueries() { - if (!_queries) _queries = await import('../domain/queries.js'); - return _queries; - }, - getDatabase() { - if (!_Database) { - const require = createRequire(import.meta.url); - _Database = require('better-sqlite3'); - } - return _Database; - }, - }; -} - -async function resolveDbPath(customDbPath, args, allowedRepos) { - let dbPath = customDbPath || undefined; - if (args.repo) { - if (allowedRepos 
&& !allowedRepos.includes(args.repo)) { - throw new ConfigError(`Repository "${args.repo}" is not in the allowed repos list.`); - } - const { resolveRepoDbPath } = await import('../infrastructure/registry.js'); - const resolved = resolveRepoDbPath(args.repo); - if (!resolved) - throw new ConfigError( - `Repository "${args.repo}" not found in registry or its database is missing.`, - ); - dbPath = resolved; - } - return dbPath; -} - -function validateMultiRepoAccess(multiRepo, name, args) { - if (!multiRepo && args.repo) { - throw new ConfigError( - 'Multi-repo access is disabled. Restart with `codegraph mcp --multi-repo` to access other repositories.', - ); - } - if (!multiRepo && name === 'list_repos') { - throw new ConfigError( - 'Multi-repo access is disabled. Restart with `codegraph mcp --multi-repo` to list repositories.', - ); - } -} - -export async function startMCPServer(customDbPath, options = {}) { - const { allowedRepos } = options; - const multiRepo = options.multiRepo || !!allowedRepos; - - // Apply config-based MCP page-size overrides - const config = options.config || loadConfig(); - initMcpDefaults(config.mcp?.defaults); - - const { Server, StdioServerTransport, ListToolsRequestSchema, CallToolRequestSchema } = - await loadMCPSdk(); - - // Connect transport FIRST so the server can receive the client's - // `initialize` request while heavy modules (queries, better-sqlite3) - // are still loading. These are lazy-loaded on the first tool call - // and cached for subsequent calls. 
- const { getQueries, getDatabase } = createLazyLoaders(); - - const server = new Server( - { name: 'codegraph', version: '1.0.0' }, - { capabilities: { tools: {} } }, - ); - - server.setRequestHandler(ListToolsRequestSchema, async () => ({ - tools: buildToolList(multiRepo), - })); - - server.setRequestHandler(CallToolRequestSchema, async (request) => { - const { name, arguments: args } = request.params; - try { - validateMultiRepoAccess(multiRepo, name, args); - const dbPath = await resolveDbPath(customDbPath, args, allowedRepos); - - const toolEntry = TOOL_HANDLERS.get(name); - if (!toolEntry) { - return { content: [{ type: 'text', text: `Unknown tool: ${name}` }], isError: true }; - } - - const ctx = { dbPath, getQueries, getDatabase, findDbPath, allowedRepos, MCP_MAX_LIMIT }; - const result = await toolEntry.handler(args, ctx); - if (result?.content) return result; - return { content: [{ type: 'text', text: JSON.stringify(result, null, 2) }] }; - } catch (err) { - const code = err instanceof CodegraphError ? err.code : 'UNKNOWN_ERROR'; - const text = - err instanceof CodegraphError ? 
`[${code}] ${err.message}` : `Error: ${err.message}`; - return { content: [{ type: 'text', text }], isError: true }; - } - }); - - const transport = new StdioServerTransport(); - await server.connect(transport); -} diff --git a/src/mcp/server.ts b/src/mcp/server.ts index da9bd5b7..b16eaa91 100644 --- a/src/mcp/server.ts +++ b/src/mcp/server.ts @@ -154,8 +154,15 @@ export async function startMCPServer( return { content: [{ type: 'text', text: `Unknown tool: ${name}` }], isError: true }; } - const ctx = { dbPath, getQueries, getDatabase, findDbPath, allowedRepos, MCP_MAX_LIMIT }; - const result = await toolEntry.handler(args, ctx); + const ctx: McpToolContext = { + dbPath: dbPath!, + getQueries, + getDatabase, + findDbPath, + allowedRepos, + MCP_MAX_LIMIT, + }; + const result = (await toolEntry.handler(args, ctx)) as any; if (result?.content) return result; return { content: [{ type: 'text', text: JSON.stringify(result, null, 2) }] }; } catch (err: unknown) { diff --git a/src/mcp/tool-registry.js b/src/mcp/tool-registry.js deleted file mode 100644 index 06b8c70e..00000000 --- a/src/mcp/tool-registry.js +++ /dev/null @@ -1,852 +0,0 @@ -/** - * MCP tool schema registry. - * - * Owns BASE_TOOLS, LIST_REPOS_TOOL, buildToolList(), and the backward-compatible TOOLS export. 
- */ - -import { EVERY_EDGE_KIND, EVERY_SYMBOL_KIND, VALID_ROLES } from '../domain/queries.js'; -import { AST_NODE_KINDS } from '../features/ast.js'; - -const REPO_PROP = { - repo: { - type: 'string', - description: 'Repository name from the registry (omit for local project)', - }, -}; - -const PAGINATION_PROPS = { - limit: { type: 'number', description: 'Max results to return (pagination)' }, - offset: { type: 'number', description: 'Skip this many results (pagination, default: 0)' }, -}; - -const BASE_TOOLS = [ - { - name: 'query', - description: - 'Query the call graph: find callers/callees with transitive chain, or find shortest path between two symbols', - inputSchema: { - type: 'object', - properties: { - name: { type: 'string', description: 'Function/method/class name (partial match)' }, - mode: { - type: 'string', - enum: ['deps', 'path'], - description: 'deps (default): dependency chain. path: shortest path to target', - }, - depth: { - type: 'number', - description: 'Transitive depth (deps default: 3, path default: 10)', - }, - file: { - type: 'string', - description: 'Scope search to functions in this file (partial match)', - }, - kind: { - type: 'string', - enum: EVERY_SYMBOL_KIND, - description: 'Filter by symbol kind', - }, - to: { type: 'string', description: 'Target symbol for path mode (required in path mode)' }, - edge_kinds: { - type: 'array', - items: { type: 'string', enum: EVERY_EDGE_KIND }, - description: 'Edge kinds to follow in path mode (default: ["calls"])', - }, - reverse: { - type: 'boolean', - description: 'Follow edges backward in path mode', - default: false, - }, - from_file: { type: 'string', description: 'Disambiguate source by file in path mode' }, - to_file: { type: 'string', description: 'Disambiguate target by file in path mode' }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - ...PAGINATION_PROPS, - }, - required: ['name'], - }, - }, - { - name: 'path', - description: 'Find shortest path 
between two symbols in the dependency graph', - inputSchema: { - type: 'object', - properties: { - from: { type: 'string', description: 'Source symbol name' }, - to: { type: 'string', description: 'Target symbol name' }, - depth: { type: 'number', description: 'Max traversal depth (default: 10)' }, - edge_kinds: { - type: 'array', - items: { type: 'string', enum: EVERY_EDGE_KIND }, - description: 'Edge kinds to follow (default: ["calls"])', - }, - from_file: { type: 'string', description: 'Disambiguate source by file' }, - to_file: { type: 'string', description: 'Disambiguate target by file' }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - }, - required: ['from', 'to'], - }, - }, - { - name: 'file_deps', - description: 'Show what a file imports and what imports it', - inputSchema: { - type: 'object', - properties: { - file: { type: 'string', description: 'File path (partial match supported)' }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - ...PAGINATION_PROPS, - }, - required: ['file'], - }, - }, - { - name: 'brief', - description: - 'Token-efficient file summary: symbols with roles and transitive caller counts, importer counts, and file risk tier (high/medium/low). 
Designed for context injection.', - inputSchema: { - type: 'object', - properties: { - file: { type: 'string', description: 'File path (partial match supported)' }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - }, - required: ['file'], - }, - }, - { - name: 'file_exports', - description: - 'Show exported symbols of a file with per-symbol consumers — who calls each export and from where', - inputSchema: { - type: 'object', - properties: { - file: { type: 'string', description: 'File path (partial match supported)' }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - unused: { - type: 'boolean', - description: 'Show only exports with zero consumers', - default: false, - }, - ...PAGINATION_PROPS, - }, - required: ['file'], - }, - }, - { - name: 'impact_analysis', - description: 'Show files affected by changes to a given file (transitive)', - inputSchema: { - type: 'object', - properties: { - file: { type: 'string', description: 'File path to analyze' }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - }, - required: ['file'], - }, - }, - { - name: 'find_cycles', - description: 'Detect circular dependencies in the codebase', - inputSchema: { - type: 'object', - properties: {}, - }, - }, - { - name: 'module_map', - description: 'Get high-level overview of most-connected files', - inputSchema: { - type: 'object', - properties: { - limit: { type: 'number', description: 'Number of top files to show', default: 20 }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - }, - }, - }, - { - name: 'fn_impact', - description: - 'Show function-level blast radius: all functions transitively affected by changes to a function', - inputSchema: { - type: 'object', - properties: { - name: { type: 'string', description: 'Function/method/class name (partial match)' }, - depth: { type: 'number', description: 'Max traversal depth', default: 5 }, - 
file: { - type: 'string', - description: 'Scope search to functions in this file (partial match)', - }, - kind: { - type: 'string', - enum: EVERY_SYMBOL_KIND, - description: 'Filter to a specific symbol kind', - }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - ...PAGINATION_PROPS, - }, - required: ['name'], - }, - }, - { - name: 'context', - description: - 'Full context for a function: source code, dependencies with summaries, callers, signature, and related tests — everything needed to understand or modify a function in one call', - inputSchema: { - type: 'object', - properties: { - name: { type: 'string', description: 'Function/method/class name (partial match)' }, - depth: { - type: 'number', - description: 'Include callee source up to N levels deep (0=no source, 1=direct)', - default: 0, - }, - file: { - type: 'string', - description: 'Scope search to functions in this file (partial match)', - }, - kind: { - type: 'string', - enum: EVERY_SYMBOL_KIND, - description: 'Filter to a specific symbol kind', - }, - no_source: { - type: 'boolean', - description: 'Skip source extraction (metadata only)', - default: false, - }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - include_tests: { - type: 'boolean', - description: 'Include test file source code', - default: false, - }, - ...PAGINATION_PROPS, - }, - required: ['name'], - }, - }, - { - name: 'symbol_children', - description: - 'List sub-declaration children of a symbol: parameters, properties, constants. Answers "what fields does this class have?" 
without reading source.', - inputSchema: { - type: 'object', - properties: { - name: { type: 'string', description: 'Function/method/class name (partial match)' }, - file: { type: 'string', description: 'Scope to file (partial match)' }, - kind: { type: 'string', enum: EVERY_SYMBOL_KIND, description: 'Filter by symbol kind' }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - ...PAGINATION_PROPS, - }, - required: ['name'], - }, - }, - { - name: 'where', - description: - 'Find where a symbol is defined and used, or list symbols/imports/exports for a file. Minimal, fast lookup.', - inputSchema: { - type: 'object', - properties: { - target: { type: 'string', description: 'Symbol name or file path' }, - file_mode: { - type: 'boolean', - description: 'Treat target as file path (list symbols/imports/exports)', - default: false, - }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - ...PAGINATION_PROPS, - }, - required: ['target'], - }, - }, - { - name: 'diff_impact', - description: 'Analyze git diff to find which functions changed and their transitive callers', - inputSchema: { - type: 'object', - properties: { - staged: { type: 'boolean', description: 'Analyze staged changes only', default: false }, - ref: { type: 'string', description: 'Git ref to diff against (default: HEAD)' }, - depth: { type: 'number', description: 'Transitive caller depth', default: 3 }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - format: { - type: 'string', - enum: ['json', 'mermaid'], - description: 'Output format (default: json)', - }, - ...PAGINATION_PROPS, - }, - }, - }, - { - name: 'semantic_search', - description: - 'Search code symbols by meaning using embeddings and/or keyword matching (requires prior `codegraph embed`). 
Default hybrid mode combines BM25 keyword + semantic search for best results.', - inputSchema: { - type: 'object', - properties: { - query: { type: 'string', description: 'Natural language search query' }, - limit: { type: 'number', description: 'Max results to return', default: 15 }, - min_score: { type: 'number', description: 'Minimum similarity score (0-1)', default: 0.2 }, - mode: { - type: 'string', - enum: ['hybrid', 'semantic', 'keyword'], - description: - 'Search mode: hybrid (BM25 + semantic, default), semantic (embeddings only), keyword (BM25 only)', - }, - ...PAGINATION_PROPS, - }, - required: ['query'], - }, - }, - { - name: 'export_graph', - description: - 'Export the dependency graph in DOT, Mermaid, JSON, GraphML, GraphSON, or Neo4j CSV format', - inputSchema: { - type: 'object', - properties: { - format: { - type: 'string', - enum: ['dot', 'mermaid', 'json', 'graphml', 'graphson', 'neo4j'], - description: 'Export format', - }, - file_level: { - type: 'boolean', - description: 'File-level graph (true) or function-level (false)', - default: true, - }, - ...PAGINATION_PROPS, - }, - required: ['format'], - }, - }, - { - name: 'list_functions', - description: - 'List functions, methods, classes, structs, enums, traits, records, and modules in the codebase, optionally filtered by file or name pattern', - inputSchema: { - type: 'object', - properties: { - file: { type: 'string', description: 'Filter by file path (partial match)' }, - pattern: { type: 'string', description: 'Filter by function name (partial match)' }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - ...PAGINATION_PROPS, - }, - }, - }, - { - name: 'structure', - description: - 'Show project structure with directory hierarchy, cohesion scores, and per-file metrics. 
Per-file details are capped at 25 files by default; use full=true to show all.', - inputSchema: { - type: 'object', - properties: { - directory: { type: 'string', description: 'Filter to a specific directory path' }, - depth: { type: 'number', description: 'Max directory depth to show' }, - sort: { - type: 'string', - enum: ['cohesion', 'fan-in', 'fan-out', 'density', 'files'], - description: 'Sort directories by metric', - }, - full: { - type: 'boolean', - description: 'Return all files without limit', - default: false, - }, - ...PAGINATION_PROPS, - }, - }, - }, - { - name: 'node_roles', - description: - 'Show node role classification (entry, core, utility, adapter, dead [dead-leaf, dead-entry, dead-ffi, dead-unresolved], leaf) based on connectivity patterns', - inputSchema: { - type: 'object', - properties: { - role: { - type: 'string', - enum: VALID_ROLES, - description: 'Filter to a specific role', - }, - file: { type: 'string', description: 'Scope to a specific file (partial match)' }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - ...PAGINATION_PROPS, - }, - }, - }, - { - name: 'co_changes', - description: - 'Find files that historically change together based on git commit history. Requires prior `codegraph co-change --analyze`.', - inputSchema: { - type: 'object', - properties: { - file: { - type: 'string', - description: 'File path (partial match). 
Omit for top global pairs.', - }, - limit: { type: 'number', description: 'Max results', default: 20 }, - min_jaccard: { - type: 'number', - description: 'Minimum Jaccard similarity (0-1)', - default: 0.3, - }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - offset: { type: 'number', description: 'Skip this many results (pagination, default: 0)' }, - }, - }, - }, - { - name: 'execution_flow', - description: - 'Trace execution flow forward from an entry point through callees to leaves, or list all entry points with list=true', - inputSchema: { - type: 'object', - properties: { - name: { - type: 'string', - description: - 'Entry point or function name (required unless list=true). Supports prefix-stripped matching.', - }, - list: { - type: 'boolean', - description: 'List all entry points grouped by type', - default: false, - }, - depth: { type: 'number', description: 'Max forward traversal depth', default: 10 }, - file: { - type: 'string', - description: 'Scope search to functions in this file (partial match)', - }, - kind: { - type: 'string', - enum: EVERY_SYMBOL_KIND, - description: 'Filter to a specific symbol kind', - }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - ...PAGINATION_PROPS, - }, - }, - }, - { - name: 'sequence', - description: - 'Generate a Mermaid sequence diagram from call graph edges. 
Participants are files, messages are function calls between them.', - inputSchema: { - type: 'object', - properties: { - name: { - type: 'string', - description: 'Entry point or function name to trace from (partial match)', - }, - depth: { type: 'number', description: 'Max forward traversal depth', default: 10 }, - format: { - type: 'string', - enum: ['mermaid', 'json'], - description: 'Output format (default: mermaid)', - }, - dataflow: { - type: 'boolean', - description: 'Annotate with parameter names and return arrows', - default: false, - }, - file: { - type: 'string', - description: 'Scope search to functions in this file (partial match)', - }, - kind: { - type: 'string', - enum: EVERY_SYMBOL_KIND, - description: 'Filter to a specific symbol kind', - }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - ...PAGINATION_PROPS, - }, - required: ['name'], - }, - }, - { - name: 'complexity', - description: - 'Show per-function complexity metrics (cognitive, cyclomatic, nesting, Halstead, Maintainability Index). 
Sorted by most complex first.', - inputSchema: { - type: 'object', - properties: { - name: { type: 'string', description: 'Function name filter (partial match)' }, - file: { type: 'string', description: 'Scope to file (partial match)' }, - limit: { type: 'number', description: 'Max results', default: 20 }, - sort: { - type: 'string', - enum: ['cognitive', 'cyclomatic', 'nesting', 'mi', 'volume', 'effort', 'bugs', 'loc'], - description: 'Sort metric', - default: 'cognitive', - }, - above_threshold: { - type: 'boolean', - description: 'Only functions exceeding warn thresholds', - default: false, - }, - health: { - type: 'boolean', - description: 'Include Halstead and Maintainability Index metrics', - default: false, - }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - kind: { - type: 'string', - description: 'Filter by symbol kind (function, method, class, etc.)', - }, - offset: { type: 'number', description: 'Skip this many results (pagination, default: 0)' }, - }, - }, - }, - { - name: 'communities', - description: - 'Detect natural module boundaries using Louvain community detection. Compares discovered communities against directory structure and surfaces architectural drift.', - inputSchema: { - type: 'object', - properties: { - functions: { - type: 'boolean', - description: 'Function-level instead of file-level', - default: false, - }, - resolution: { - type: 'number', - description: 'Louvain resolution parameter (higher = more communities)', - default: 1.0, - }, - drift: { - type: 'boolean', - description: 'Show only drift analysis (omit community member lists)', - default: false, - }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - ...PAGINATION_PROPS, - }, - }, - }, - { - name: 'code_owners', - description: - 'Show CODEOWNERS mapping for files and functions. 
Shows ownership coverage, per-owner breakdown, and cross-owner boundary edges.', - inputSchema: { - type: 'object', - properties: { - file: { type: 'string', description: 'Scope to a specific file (partial match)' }, - owner: { type: 'string', description: 'Filter to a specific owner (e.g. @team-name)' }, - boundary: { - type: 'boolean', - description: 'Show cross-owner boundary edges', - default: false, - }, - kind: { - type: 'string', - description: 'Filter by symbol kind (function, method, class, etc.)', - }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - }, - }, - }, - { - name: 'audit', - description: - 'Composite report combining explain, fn-impact, and health metrics for a file or function. Returns structure, blast radius, complexity, and threshold breaches in one call.', - inputSchema: { - type: 'object', - properties: { - target: { type: 'string', description: 'File path or function name' }, - quick: { - type: 'boolean', - description: 'Structural summary only (skip impact + health)', - default: false, - }, - depth: { type: 'number', description: 'Impact analysis depth (default: 3)', default: 3 }, - file: { type: 'string', description: 'Scope to file (partial match)' }, - kind: { - type: 'string', - description: 'Filter by symbol kind (function, method, class, etc.)', - }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - ...PAGINATION_PROPS, - }, - required: ['target'], - }, - }, - { - name: 'batch_query', - description: - 'Run a query command against multiple targets in one call. 
Returns all results in a single JSON payload — ideal for multi-agent dispatch.', - inputSchema: { - type: 'object', - properties: { - command: { - type: 'string', - enum: [ - 'fn-impact', - 'context', - 'explain', - 'where', - 'query', - 'impact', - 'deps', - 'flow', - 'dataflow', - 'complexity', - ], - description: 'The query command to run for each target', - }, - targets: { - type: 'array', - items: { type: 'string' }, - description: 'List of target names (symbol names or file paths depending on command)', - }, - depth: { - type: 'number', - description: 'Traversal depth (for fn-impact, context, fn, flow)', - }, - file: { - type: 'string', - description: 'Scope to file (partial match)', - }, - kind: { - type: 'string', - enum: EVERY_SYMBOL_KIND, - description: 'Filter symbol kind', - }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - }, - required: ['command', 'targets'], - }, - }, - { - name: 'triage', - description: - 'Ranked audit queue by composite risk score. Merges connectivity (fan-in), complexity (cognitive), churn (commit count), role classification, and maintainability index into a single weighted score.', - inputSchema: { - type: 'object', - properties: { - level: { - type: 'string', - enum: ['function', 'file', 'directory'], - description: - 'Granularity: function (default) | file | directory. 
File/directory shows hotspots', - }, - sort: { - type: 'string', - enum: ['risk', 'complexity', 'churn', 'fan-in', 'mi'], - description: 'Sort metric (default: risk)', - }, - min_score: { - type: 'number', - description: 'Only return symbols with risk score >= this threshold (0-1)', - }, - role: { - type: 'string', - enum: VALID_ROLES, - description: 'Filter by role classification', - }, - file: { type: 'string', description: 'Scope to file (partial match)' }, - kind: { - type: 'string', - enum: ['function', 'method', 'class'], - description: 'Filter by symbol kind', - }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - weights: { - type: 'object', - description: - 'Custom scoring weights (e.g. {"fanIn":1,"complexity":0,"churn":0,"role":0,"mi":0})', - }, - ...PAGINATION_PROPS, - }, - }, - }, - { - name: 'branch_compare', - description: - 'Compare code structure between two git refs (branches, tags, commits). Shows added/removed/changed symbols and transitive caller impact using temporary git worktrees.', - inputSchema: { - type: 'object', - properties: { - base: { type: 'string', description: 'Base git ref (branch, tag, or commit SHA)' }, - target: { type: 'string', description: 'Target git ref to compare against base' }, - depth: { type: 'number', description: 'Max transitive caller depth', default: 3 }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - format: { - type: 'string', - enum: ['json', 'mermaid'], - description: 'Output format (default: json)', - }, - }, - required: ['base', 'target'], - }, - }, - { - name: 'cfg', - description: 'Show intraprocedural control flow graph for a function.', - inputSchema: { - type: 'object', - properties: { - name: { type: 'string', description: 'Function/method name (partial match)' }, - format: { - type: 'string', - enum: ['json', 'dot', 'mermaid'], - description: 'Output format (default: json)', - }, - file: { type: 'string', description: 'Scope to 
file (partial match)' }, - kind: { type: 'string', enum: EVERY_SYMBOL_KIND, description: 'Filter by symbol kind' }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - ...PAGINATION_PROPS, - }, - required: ['name'], - }, - }, - { - name: 'dataflow', - description: 'Show data flow edges or data-dependent blast radius.', - inputSchema: { - type: 'object', - properties: { - name: { type: 'string', description: 'Function/method name (partial match)' }, - mode: { - type: 'string', - enum: ['edges', 'impact'], - description: 'edges (default) or impact', - }, - depth: { type: 'number', description: 'Max depth for impact mode', default: 5 }, - file: { type: 'string', description: 'Scope to file (partial match)' }, - kind: { type: 'string', enum: EVERY_SYMBOL_KIND, description: 'Filter by symbol kind' }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - ...PAGINATION_PROPS, - }, - required: ['name'], - }, - }, - { - name: 'check', - description: - 'CI gate: run manifesto rules (no args), diff predicates (with ref/staged), or both (with rules flag). 
Returns pass/fail verdicts.', - inputSchema: { - type: 'object', - properties: { - ref: { type: 'string', description: 'Git ref to diff against (default: HEAD)' }, - staged: { type: 'boolean', description: 'Analyze staged changes instead of unstaged' }, - rules: { - type: 'boolean', - description: 'Also run manifesto rules alongside diff predicates', - }, - cycles: { type: 'boolean', description: 'Enable cycles predicate (default: true)' }, - blast_radius: { - type: 'number', - description: 'Max transitive callers threshold (null = disabled)', - }, - signatures: { type: 'boolean', description: 'Enable signatures predicate (default: true)' }, - boundaries: { type: 'boolean', description: 'Enable boundaries predicate (default: true)' }, - depth: { type: 'number', description: 'Max BFS depth for blast radius (default: 3)' }, - file: { type: 'string', description: 'Scope to file (partial match, manifesto mode)' }, - kind: { - type: 'string', - description: 'Filter by symbol kind (manifesto mode)', - }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - ...PAGINATION_PROPS, - }, - }, - }, - { - name: 'implementations', - description: - 'List all concrete types (classes, structs, records) that implement a given interface or trait', - inputSchema: { - type: 'object', - properties: { - name: { type: 'string', description: 'Interface/trait name (partial match)' }, - file: { type: 'string', description: 'Scope to file (partial match)' }, - kind: { - type: 'string', - enum: EVERY_SYMBOL_KIND, - description: 'Filter by symbol kind', - }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - ...PAGINATION_PROPS, - }, - required: ['name'], - }, - }, - { - name: 'interfaces', - description: 'List all interfaces and traits that a given class, struct, or record implements', - inputSchema: { - type: 'object', - properties: { - name: { type: 'string', description: 'Class/struct name (partial match)' }, - file: { type: 
'string', description: 'Scope to file (partial match)' }, - kind: { - type: 'string', - enum: EVERY_SYMBOL_KIND, - description: 'Filter by symbol kind', - }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - ...PAGINATION_PROPS, - }, - required: ['name'], - }, - }, - { - name: 'ast_query', - description: - 'Search stored AST nodes (calls, literals, new, throw, await) by pattern. Requires a prior build.', - inputSchema: { - type: 'object', - properties: { - pattern: { - type: 'string', - description: 'GLOB pattern for node name (auto-wrapped in *..* for substring match)', - }, - kind: { - type: 'string', - enum: AST_NODE_KINDS, - description: 'Filter by AST node kind', - }, - file: { type: 'string', description: 'Scope to file (partial match)' }, - no_tests: { type: 'boolean', description: 'Exclude test files', default: false }, - ...PAGINATION_PROPS, - }, - }, - }, -]; - -const LIST_REPOS_TOOL = { - name: 'list_repos', - description: 'List all repositories registered in the codegraph registry', - inputSchema: { - type: 'object', - properties: {}, - }, -}; - -/** - * Build the tool list based on multi-repo mode. 
- * @param {boolean} multiRepo - If true, inject `repo` prop into each tool and append `list_repos` - * @returns {object[]} - */ -export function buildToolList(multiRepo) { - if (!multiRepo) return BASE_TOOLS; - return [ - ...BASE_TOOLS.map((tool) => ({ - ...tool, - inputSchema: { - ...tool.inputSchema, - properties: { ...tool.inputSchema.properties, ...REPO_PROP }, - }, - })), - LIST_REPOS_TOOL, - ]; -} - -// Backward-compatible export: full multi-repo tool list -export const TOOLS = buildToolList(true); diff --git a/src/presentation/audit.ts b/src/presentation/audit.ts index d77a8fb1..03e6bce5 100644 --- a/src/presentation/audit.ts +++ b/src/presentation/audit.ts @@ -11,6 +11,8 @@ interface AuditOpts { quick?: boolean; limit?: number; offset?: number; + depth?: number; + config?: unknown; } export function audit( @@ -19,7 +21,7 @@ export function audit( opts: AuditOpts = {}, ): void { // biome-ignore lint/suspicious/noExplicitAny: audit data shape is dynamic - const data: any = auditData(target, customDbPath, opts); + const data: any = auditData(target, customDbPath, opts as any); if (outputResult(data, null, opts)) return; diff --git a/src/presentation/branch-compare.ts b/src/presentation/branch-compare.ts index de01aeb8..e2429d7a 100644 --- a/src/presentation/branch-compare.ts +++ b/src/presentation/branch-compare.ts @@ -121,6 +121,8 @@ interface BranchCompareCliOpts { repoRoot?: string; noTests?: boolean; dbPath?: string; + engine?: string; + depth?: number; } export async function branchCompare( diff --git a/src/presentation/check.ts b/src/presentation/check.ts index 30515008..0887d2f6 100644 --- a/src/presentation/check.ts +++ b/src/presentation/check.ts @@ -7,10 +7,16 @@ interface CheckCliOpts { ndjson?: boolean; noTests?: boolean; base?: string; + ref?: string; + staged?: boolean; + cycles?: string; + blastRadius?: number; + depth?: number; maxCyclomatic?: number; maxBlastRadius?: number; boundaries?: boolean; signatures?: boolean; + config?: unknown; } 
interface CheckViolation { @@ -47,7 +53,7 @@ interface CheckDataResult { } export function check(customDbPath: string | undefined, opts: CheckCliOpts = {}): void { - const data = checkData(customDbPath, opts) as CheckDataResult; + const data = checkData(customDbPath, opts as any) as CheckDataResult; if (data.error) { throw new AnalysisError(data.error); diff --git a/src/presentation/complexity.ts b/src/presentation/complexity.ts index 4efd3d4e..e027cdc8 100644 --- a/src/presentation/complexity.ts +++ b/src/presentation/complexity.ts @@ -13,6 +13,7 @@ interface ComplexityCliOpts { file?: string; kind?: string; offset?: number; + config?: unknown; } interface ComplexityFunction { @@ -48,7 +49,7 @@ interface ComplexityResult { } export function complexity(customDbPath: string | undefined, opts: ComplexityCliOpts = {}): void { - const data = complexityData(customDbPath, opts) as unknown as ComplexityResult; + const data = complexityData(customDbPath, opts as any) as unknown as ComplexityResult; if (outputResult(data, 'functions', opts)) return; diff --git a/src/presentation/owners.ts b/src/presentation/owners.ts index e8854a2a..04a8c150 100644 --- a/src/presentation/owners.ts +++ b/src/presentation/owners.ts @@ -7,6 +7,9 @@ interface OwnersOpts { table?: boolean; csv?: boolean; owner?: string; + boundary?: string; + file?: string | string[]; + kind?: string; noTests?: boolean; limit?: number; offset?: number; @@ -39,7 +42,7 @@ interface OwnersResult { } export function owners(customDbPath: string | undefined, opts: OwnersOpts = {}): void { - const data = ownersData(customDbPath, opts) as OwnersResult; + const data = ownersData(customDbPath, opts as any) as OwnersResult; if (outputResult(data as unknown as Record, null, opts)) return; if (!data.codeownersFile) { diff --git a/src/presentation/structure.ts b/src/presentation/structure.ts index 8d0ba6b6..806bbfb0 100644 --- a/src/presentation/structure.ts +++ b/src/presentation/structure.ts @@ -64,7 +64,8 @@ interface 
HotspotsResult { metric: string; level: string; limit: number; - hotspots: HotspotEntry[]; + // biome-ignore lint/suspicious/noExplicitAny: hotspots shape varies by level + hotspots: any[]; } export function formatHotspots(data: HotspotsResult): string { diff --git a/src/presentation/triage.ts b/src/presentation/triage.ts index da86d6ca..ca21a4b2 100644 --- a/src/presentation/triage.ts +++ b/src/presentation/triage.ts @@ -15,6 +15,7 @@ interface TriageOpts { limit?: number; offset?: number; config?: unknown; + weights?: Record; } interface TriageItem { diff --git a/src/types.ts b/src/types.ts index df2561cf..d345fb0a 100644 --- a/src/types.ts +++ b/src/types.ts @@ -500,6 +500,7 @@ export interface LanguageRegistryEntry { /** tree-sitter opaque types (thin wrappers — real impl is WASM). */ export interface TreeSitterNode { + id: number; type: string; text: string; startPosition: { row: number; column: number }; @@ -961,6 +962,8 @@ export interface BuildGraphOpts { engine?: EngineMode; dataflow?: boolean; ast?: boolean; + complexity?: boolean; + cfg?: boolean; scope?: string[]; skipRegistry?: boolean; } @@ -1809,3 +1812,35 @@ export interface NativeParseTreeCache { delete(filePath: string): void; clear(): void; } + +// ════════════════════════════════════════════════════════════════════════ +// §14 CLI Command Framework +// ════════════════════════════════════════════════════════════════════════ + +/** Shared context passed to every CLI command's execute/validate functions. 
*/ +export interface CommandContext { + // biome-ignore lint/suspicious/noExplicitAny: config is a deeply nested dynamic object + config: Record; + // biome-ignore lint/suspicious/noExplicitAny: Commander options are dynamically typed + resolveNoTests: (opts: any) => boolean; + // biome-ignore lint/suspicious/noExplicitAny: Commander options are dynamically typed + resolveQueryOpts: (opts: any) => any; + formatSize: (bytes: number) => string; + // biome-ignore lint/suspicious/noExplicitAny: data shapes vary per command + outputResult: (data: any, key: string, opts: any) => boolean; + program: import('commander').Command; +} + +/** Shape of a CLI command definition used by the registerCommand framework. */ +export interface CommandDefinition { + name: string; + description: string; + queryOpts?: boolean; + // biome-ignore lint/suspicious/noExplicitAny: option tuples contain mixed types (strings, functions, defaults) + options?: Array<[string, string, ...any[]]>; + // biome-ignore lint/suspicious/noExplicitAny: Commander options are dynamically typed + validate?: (args: any[], opts: any, ctx: CommandContext) => string | undefined | void; + // biome-ignore lint/suspicious/noExplicitAny: Commander options are dynamically typed + execute?: (args: any[], opts: any, ctx: CommandContext) => void | Promise; + subcommands?: CommandDefinition[]; +} diff --git a/tests/integration/batch.test.js b/tests/integration/batch.test.js index 216c98e3..663c271c 100644 --- a/tests/integration/batch.test.js +++ b/tests/integration/batch.test.js @@ -15,6 +15,7 @@ import { execFileSync } from 'node:child_process'; import fs from 'node:fs'; import os from 'node:os'; import path from 'node:path'; +import { pathToFileURL } from 'node:url'; import Database from 'better-sqlite3'; import { afterAll, beforeAll, describe, expect, test } from 'vitest'; import { initSchema } from '../../src/db/index.js'; @@ -215,14 +216,19 @@ describe('batchData — complexity (dbOnly signature)', () => { 
describe.skipIf(!canStripTypes)('batch CLI', () => { const cliPath = path.resolve( path.dirname(new URL(import.meta.url).pathname.replace(/^\/([A-Z]:)/i, '$1')), - '../../src/cli.js', + '../../src/cli.ts', ); test('outputs valid JSON', () => { - const out = execFileSync('node', [cliPath, 'batch', 'query', 'authenticate', '--db', dbPath], { - encoding: 'utf-8', - timeout: 30_000, - }); + const loaderPath = pathToFileURL(path.resolve('scripts/ts-resolve-loader.js')).href; + const out = execFileSync( + 'node', + ['--import', loaderPath, cliPath, 'batch', 'query', 'authenticate', '--db', dbPath], + { + encoding: 'utf-8', + timeout: 30_000, + }, + ); const parsed = JSON.parse(out); expect(parsed.command).toBe('query'); expect(parsed.total).toBe(1); @@ -230,9 +236,19 @@ describe.skipIf(!canStripTypes)('batch CLI', () => { }); test('batch accepts comma-separated positional targets', () => { + const loaderPath = pathToFileURL(path.resolve('scripts/ts-resolve-loader.js')).href; const out = execFileSync( 'node', - [cliPath, 'batch', 'where', 'authenticate,validateToken', '--db', dbPath], + [ + '--import', + loaderPath, + cliPath, + 'batch', + 'where', + 'authenticate,validateToken', + '--db', + dbPath, + ], { encoding: 'utf-8', timeout: 30_000 }, ); const parsed = JSON.parse(out); diff --git a/tests/integration/cli.test.js b/tests/integration/cli.test.js index 52cc8ff1..5bb29796 100644 --- a/tests/integration/cli.test.js +++ b/tests/integration/cli.test.js @@ -13,7 +13,10 @@ import { afterAll, beforeAll, describe, expect, test } from 'vitest'; const [_major, _minor] = process.versions.node.split('.').map(Number); const canStripTypes = _major > 22 || (_major === 22 && _minor >= 6); -const CLI = path.resolve('src/cli.js'); +import { pathToFileURL } from 'node:url'; + +const CLI = path.resolve('src/cli.ts'); +const LOADER = pathToFileURL(path.resolve('scripts/ts-resolve-loader.js')).href; const FIXTURE_FILES = { 'math.js': ` @@ -44,7 +47,7 @@ let tmpDir, tmpHome, dbPath; /** 
Run the CLI and return stdout as a string. Throws on non-zero exit. */ function run(...args) { - return execFileSync('node', [CLI, ...args], { + return execFileSync('node', ['--import', LOADER, CLI, ...args], { cwd: tmpDir, encoding: 'utf-8', timeout: 30_000, @@ -246,7 +249,7 @@ describe.skipIf(!canStripTypes)('Registry CLI commands', () => { /** Run CLI with isolated HOME to avoid touching real registry */ function runReg(...args) { - return execFileSync('node', [CLI, ...args], { + return execFileSync('node', ['--import', LOADER, CLI, ...args], { cwd: tmpDir, encoding: 'utf-8', timeout: 30_000, From 9a169a02602be36fd80e8879123663f83ec18b5e Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Mon, 23 Mar 2026 23:49:56 -0600 Subject: [PATCH 05/26] fix(tests): revert test CLI path to cli.js for standalone PR compatibility MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The cli.js → cli.ts rename lives in PR #580. This PR must reference cli.js so CI passes independently without the CLI migration. 
--- tests/integration/batch.test.js | 28 ++++++---------------------- tests/integration/cli.test.js | 9 +++------ 2 files changed, 9 insertions(+), 28 deletions(-) diff --git a/tests/integration/batch.test.js b/tests/integration/batch.test.js index 663c271c..216c98e3 100644 --- a/tests/integration/batch.test.js +++ b/tests/integration/batch.test.js @@ -15,7 +15,6 @@ import { execFileSync } from 'node:child_process'; import fs from 'node:fs'; import os from 'node:os'; import path from 'node:path'; -import { pathToFileURL } from 'node:url'; import Database from 'better-sqlite3'; import { afterAll, beforeAll, describe, expect, test } from 'vitest'; import { initSchema } from '../../src/db/index.js'; @@ -216,19 +215,14 @@ describe('batchData — complexity (dbOnly signature)', () => { describe.skipIf(!canStripTypes)('batch CLI', () => { const cliPath = path.resolve( path.dirname(new URL(import.meta.url).pathname.replace(/^\/([A-Z]:)/i, '$1')), - '../../src/cli.ts', + '../../src/cli.js', ); test('outputs valid JSON', () => { - const loaderPath = pathToFileURL(path.resolve('scripts/ts-resolve-loader.js')).href; - const out = execFileSync( - 'node', - ['--import', loaderPath, cliPath, 'batch', 'query', 'authenticate', '--db', dbPath], - { - encoding: 'utf-8', - timeout: 30_000, - }, - ); + const out = execFileSync('node', [cliPath, 'batch', 'query', 'authenticate', '--db', dbPath], { + encoding: 'utf-8', + timeout: 30_000, + }); const parsed = JSON.parse(out); expect(parsed.command).toBe('query'); expect(parsed.total).toBe(1); @@ -236,19 +230,9 @@ describe.skipIf(!canStripTypes)('batch CLI', () => { }); test('batch accepts comma-separated positional targets', () => { - const loaderPath = pathToFileURL(path.resolve('scripts/ts-resolve-loader.js')).href; const out = execFileSync( 'node', - [ - '--import', - loaderPath, - cliPath, - 'batch', - 'where', - 'authenticate,validateToken', - '--db', - dbPath, - ], + [cliPath, 'batch', 'where', 'authenticate,validateToken', '--db', 
dbPath], { encoding: 'utf-8', timeout: 30_000 }, ); const parsed = JSON.parse(out); diff --git a/tests/integration/cli.test.js b/tests/integration/cli.test.js index 5bb29796..52cc8ff1 100644 --- a/tests/integration/cli.test.js +++ b/tests/integration/cli.test.js @@ -13,10 +13,7 @@ import { afterAll, beforeAll, describe, expect, test } from 'vitest'; const [_major, _minor] = process.versions.node.split('.').map(Number); const canStripTypes = _major > 22 || (_major === 22 && _minor >= 6); -import { pathToFileURL } from 'node:url'; - -const CLI = path.resolve('src/cli.ts'); -const LOADER = pathToFileURL(path.resolve('scripts/ts-resolve-loader.js')).href; +const CLI = path.resolve('src/cli.js'); const FIXTURE_FILES = { 'math.js': ` @@ -47,7 +44,7 @@ let tmpDir, tmpHome, dbPath; /** Run the CLI and return stdout as a string. Throws on non-zero exit. */ function run(...args) { - return execFileSync('node', ['--import', LOADER, CLI, ...args], { + return execFileSync('node', [CLI, ...args], { cwd: tmpDir, encoding: 'utf-8', timeout: 30_000, @@ -249,7 +246,7 @@ describe.skipIf(!canStripTypes)('Registry CLI commands', () => { /** Run CLI with isolated HOME to avoid touching real registry */ function runReg(...args) { - return execFileSync('node', ['--import', LOADER, CLI, ...args], { + return execFileSync('node', [CLI, ...args], { cwd: tmpDir, encoding: 'utf-8', timeout: 30_000, From 7e435de42850dd4001c4228bf6ff6927de7c9547 Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Tue, 24 Mar 2026 00:32:17 -0600 Subject: [PATCH 06/26] fix(types): make McpToolContext.dbPath reflect nullable return of resolveDbPath (#581) Impact: 2 functions changed, 3 affected --- src/mcp/server.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/mcp/server.ts b/src/mcp/server.ts index b16eaa91..cdfa449a 100644 --- a/src/mcp/server.ts +++ b/src/mcp/server.ts @@ -20,7 +20,7 @@ import { buildToolList } from './tool-registry.js'; 
import { TOOL_HANDLERS } from './tools/index.js'; export interface McpToolContext { - dbPath: string; + dbPath: string | undefined; // biome-ignore lint/suspicious/noExplicitAny: lazy-loaded queries module getQueries(): Promise; // biome-ignore lint/suspicious/noExplicitAny: lazy-loaded better-sqlite3 constructor @@ -155,7 +155,7 @@ export async function startMCPServer( } const ctx: McpToolContext = { - dbPath: dbPath!, + dbPath: dbPath, getQueries, getDatabase, findDbPath, From edd3abad26de708599579ff46ca14596ade550e5 Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Tue, 24 Mar 2026 01:08:59 -0600 Subject: [PATCH 07/26] fix(types): add non-null assertions for Commander positional args (#581) With noUncheckedIndexedAccess enabled, destructuring from string[] gives string | undefined. Commander guarantees required positional args (marked with <>) are present, so non-null assertions are safe here. Also fixes PlotConfig type bridge and co-change return type casts. 
Impact: 20 functions changed, 1 affected --- src/cli/commands/audit.ts | 4 ++-- src/cli/commands/batch.ts | 2 +- src/cli/commands/branch-compare.ts | 2 +- src/cli/commands/brief.ts | 2 +- src/cli/commands/cfg.ts | 2 +- src/cli/commands/children.ts | 2 +- src/cli/commands/co-change.ts | 4 ++-- src/cli/commands/context.ts | 2 +- src/cli/commands/dataflow.ts | 2 +- src/cli/commands/deps.ts | 4 ++-- src/cli/commands/exports.ts | 2 +- src/cli/commands/fn-impact.ts | 2 +- src/cli/commands/impact.ts | 2 +- src/cli/commands/implementations.ts | 2 +- src/cli/commands/interfaces.ts | 2 +- src/cli/commands/path.ts | 2 +- src/cli/commands/plot.ts | 3 ++- src/cli/commands/query.ts | 4 ++-- src/cli/commands/sequence.ts | 2 +- src/presentation/flow.ts | 8 ++++++-- 20 files changed, 30 insertions(+), 25 deletions(-) diff --git a/src/cli/commands/audit.ts b/src/cli/commands/audit.ts index 6a511df7..93258106 100644 --- a/src/cli/commands/audit.ts +++ b/src/cli/commands/audit.ts @@ -29,13 +29,13 @@ export const command: CommandDefinition = { execute([target], opts, ctx) { const qOpts = ctx.resolveQueryOpts(opts); if (opts.quick) { - explain(target, opts.db, { + explain(target!, opts.db, { depth: parseInt(opts.depth as string, 10), ...qOpts, }); return; } - audit(target, opts.db, { + audit(target!, opts.db, { depth: parseInt(opts.depth as string, 10), file: opts.file, kind: opts.kind, diff --git a/src/cli/commands/batch.ts b/src/cli/commands/batch.ts index 22caae96..abfbf16c 100644 --- a/src/cli/commands/batch.ts +++ b/src/cli/commands/batch.ts @@ -79,7 +79,7 @@ export const command: CommandDefinition = { const data = multiBatchData(targets as MultiBatchItem[], opts.db, batchOpts); console.log(JSON.stringify(data, null, 2)); } else { - batch(command, targets as string[], opts.db, batchOpts); + batch(command!, targets as string[], opts.db, batchOpts); } }, }; diff --git a/src/cli/commands/branch-compare.ts b/src/cli/commands/branch-compare.ts index 4288f0c6..fc72164f 100644 --- 
a/src/cli/commands/branch-compare.ts +++ b/src/cli/commands/branch-compare.ts @@ -12,7 +12,7 @@ export const command: CommandDefinition = { ], async execute([base, target], opts, ctx) { const { branchCompare } = await import('../../presentation/branch-compare.js'); - await branchCompare(base, target, { + await branchCompare(base!, target!, { engine: ctx.program.opts()['engine'], depth: parseInt(opts.depth as string, 10), noTests: ctx.resolveNoTests(opts), diff --git a/src/cli/commands/brief.ts b/src/cli/commands/brief.ts index 8305b69d..24f105bc 100644 --- a/src/cli/commands/brief.ts +++ b/src/cli/commands/brief.ts @@ -6,7 +6,7 @@ export const command: CommandDefinition = { description: 'Token-efficient file summary: symbols with roles, caller counts, risk tier', queryOpts: true, execute([file], opts, ctx) { - brief(file, opts.db, { + brief(file!, opts.db, { ...ctx.resolveQueryOpts(opts), }); }, diff --git a/src/cli/commands/cfg.ts b/src/cli/commands/cfg.ts index ef0a9be6..5a0d87ea 100644 --- a/src/cli/commands/cfg.ts +++ b/src/cli/commands/cfg.ts @@ -18,7 +18,7 @@ export const command: CommandDefinition = { }, async execute([name], opts, ctx) { const { cfg } = await import('../../presentation/cfg.js'); - cfg(name, opts.db, { + cfg(name!, opts.db, { format: opts.format, file: opts.file, kind: opts.kind, diff --git a/src/cli/commands/children.ts b/src/cli/commands/children.ts index 7e5fe680..32f3932b 100644 --- a/src/cli/commands/children.ts +++ b/src/cli/commands/children.ts @@ -25,7 +25,7 @@ export const command: CommandDefinition = { } }, execute([name], opts, ctx) { - children(name, opts.db, { + children(name!, opts.db, { file: opts.file, kind: opts.kind, ...ctx.resolveQueryOpts(opts), diff --git a/src/cli/commands/co-change.ts b/src/cli/commands/co-change.ts index b3f1a668..e3ef8707 100644 --- a/src/cli/commands/co-change.ts +++ b/src/cli/commands/co-change.ts @@ -60,12 +60,12 @@ export const command: CommandDefinition = { if (file) { const data = 
coChangeData(file, opts.db, queryOpts); if (!ctx.outputResult(data, 'partners', opts)) { - console.log(formatCoChange(data)); + console.log(formatCoChange(data as unknown as Parameters[0])); } } else { const data = coChangeTopData(opts.db, queryOpts); if (!ctx.outputResult(data, 'pairs', opts)) { - console.log(formatCoChangeTop(data)); + console.log(formatCoChangeTop(data as unknown as Parameters[0])); } } }, diff --git a/src/cli/commands/context.ts b/src/cli/commands/context.ts index 81fea598..ddc613bc 100644 --- a/src/cli/commands/context.ts +++ b/src/cli/commands/context.ts @@ -24,7 +24,7 @@ export const command: CommandDefinition = { } }, execute([name], opts, ctx) { - context(name, opts.db, { + context(name!, opts.db, { depth: parseInt(opts.depth as string, 10), file: opts.file, kind: opts.kind, diff --git a/src/cli/commands/dataflow.ts b/src/cli/commands/dataflow.ts index 74e3bb83..48e8a306 100644 --- a/src/cli/commands/dataflow.ts +++ b/src/cli/commands/dataflow.ts @@ -19,7 +19,7 @@ export const command: CommandDefinition = { }, async execute([name], opts, ctx) { const { dataflow } = await import('../../presentation/dataflow.js'); - dataflow(name, opts.db, { + dataflow(name!, opts.db, { file: opts.file, kind: opts.kind, impact: opts.impact, diff --git a/src/cli/commands/deps.ts b/src/cli/commands/deps.ts index 78618261..b39b9307 100644 --- a/src/cli/commands/deps.ts +++ b/src/cli/commands/deps.ts @@ -10,9 +10,9 @@ export const command: CommandDefinition = { execute([file], opts, ctx) { const qOpts = ctx.resolveQueryOpts(opts); if (opts.brief) { - brief(file, opts.db, qOpts); + brief(file!, opts.db, qOpts); } else { - fileDeps(file, opts.db, qOpts); + fileDeps(file!, opts.db, qOpts); } }, }; diff --git a/src/cli/commands/exports.ts b/src/cli/commands/exports.ts index d81ea06f..db82dad4 100644 --- a/src/cli/commands/exports.ts +++ b/src/cli/commands/exports.ts @@ -7,7 +7,7 @@ export const command: CommandDefinition = { queryOpts: true, options: [['--unused', 
'Show only exports with zero consumers (dead exports)']], execute([file], opts, ctx) { - fileExports(file, opts.db, { + fileExports(file!, opts.db, { unused: opts.unused || false, ...ctx.resolveQueryOpts(opts), }); diff --git a/src/cli/commands/fn-impact.ts b/src/cli/commands/fn-impact.ts index 4d81bd8a..9e0c5bb9 100644 --- a/src/cli/commands/fn-impact.ts +++ b/src/cli/commands/fn-impact.ts @@ -23,7 +23,7 @@ export const command: CommandDefinition = { } }, execute([name], opts, ctx) { - fnImpact(name, opts.db, { + fnImpact(name!, opts.db, { depth: parseInt(opts.depth as string, 10), file: opts.file, kind: opts.kind, diff --git a/src/cli/commands/impact.ts b/src/cli/commands/impact.ts index b3201a7a..c2d0b868 100644 --- a/src/cli/commands/impact.ts +++ b/src/cli/commands/impact.ts @@ -6,7 +6,7 @@ export const command: CommandDefinition = { description: 'Show what depends on this file (transitive)', queryOpts: true, execute([file], opts, ctx) { - impactAnalysis(file, opts.db, { + impactAnalysis(file!, opts.db, { ...ctx.resolveQueryOpts(opts), }); }, diff --git a/src/cli/commands/implementations.ts b/src/cli/commands/implementations.ts index e3b2ec2d..1d2b353f 100644 --- a/src/cli/commands/implementations.ts +++ b/src/cli/commands/implementations.ts @@ -21,7 +21,7 @@ export const command: CommandDefinition = { } }, execute([name], opts, ctx) { - implementations(name, opts.db, { + implementations(name!, opts.db, { file: opts.file, kind: opts.kind, ...ctx.resolveQueryOpts(opts), diff --git a/src/cli/commands/interfaces.ts b/src/cli/commands/interfaces.ts index d7281e47..294e256c 100644 --- a/src/cli/commands/interfaces.ts +++ b/src/cli/commands/interfaces.ts @@ -21,7 +21,7 @@ export const command: CommandDefinition = { } }, execute([name], opts, ctx) { - interfaces(name, opts.db, { + interfaces(name!, opts.db, { file: opts.file, kind: opts.kind, ...ctx.resolveQueryOpts(opts), diff --git a/src/cli/commands/path.ts b/src/cli/commands/path.ts index ff813522..c93718fa 
100644 --- a/src/cli/commands/path.ts +++ b/src/cli/commands/path.ts @@ -23,7 +23,7 @@ export const command: CommandDefinition = { } }, execute([from, to], opts, ctx) { - symbolPath(from, to, opts.db, { + symbolPath(from!, to!, opts.db, { maxDepth: opts.depth ? parseInt(opts.depth as string, 10) : 10, edgeKinds: opts.kinds ? (opts.kinds as string).split(',').map((s) => s.trim()) : undefined, reverse: opts.reverse, diff --git a/src/cli/commands/plot.ts b/src/cli/commands/plot.ts index 5901e14d..caa72121 100644 --- a/src/cli/commands/plot.ts +++ b/src/cli/commands/plot.ts @@ -76,7 +76,8 @@ export const command: CommandDefinition = { fileLevel: !opts.functions, noTests: ctx.resolveNoTests(opts), minConfidence: parseFloat(opts.minConfidence as string), - config: plotCfg, + // PlotConfig shapes are structurally compatible; bridge the two declarations + config: plotCfg as never, }); } finally { close(); diff --git a/src/cli/commands/query.ts b/src/cli/commands/query.ts index 25feb86b..4e1a42ba 100644 --- a/src/cli/commands/query.ts +++ b/src/cli/commands/query.ts @@ -29,7 +29,7 @@ export const command: CommandDefinition = { execute([name], opts, ctx) { if (opts.path) { console.error('Note: "query --path" is deprecated, use "codegraph path " instead'); - symbolPath(name, opts.path as string, opts.db, { + symbolPath(name!, opts.path as string, opts.db, { maxDepth: opts.depth ? parseInt(opts.depth as string, 10) : 10, edgeKinds: opts.kinds ? 
(opts.kinds as string).split(',').map((s) => s.trim()) : undefined, reverse: opts.reverse, @@ -40,7 +40,7 @@ export const command: CommandDefinition = { json: opts.json, }); } else { - fnDeps(name, opts.db, { + fnDeps(name!, opts.db, { depth: parseInt(opts.depth as string, 10), file: opts.file, kind: opts.kind, diff --git a/src/cli/commands/sequence.ts b/src/cli/commands/sequence.ts index 7f9f80fa..cadf1cbb 100644 --- a/src/cli/commands/sequence.ts +++ b/src/cli/commands/sequence.ts @@ -19,7 +19,7 @@ export const command: CommandDefinition = { }, async execute([name], opts, ctx) { const { sequence } = await import('../../presentation/sequence.js'); - sequence(name, opts.db, { + sequence(name!, opts.db, { depth: parseInt(opts.depth as string, 10), file: opts.file, kind: opts.kind, diff --git a/src/presentation/flow.ts b/src/presentation/flow.ts index 1225a042..7aec6f9f 100644 --- a/src/presentation/flow.ts +++ b/src/presentation/flow.ts @@ -16,7 +16,11 @@ interface FlowOpts { csv?: boolean; } -export function flow(name: string, dbPath: string | undefined, opts: FlowOpts = {}): void { +export function flow( + name: string | undefined, + dbPath: string | undefined, + opts: FlowOpts = {}, +): void { if (opts.list) { const data = listEntryPointsData(dbPath, { noTests: opts.noTests, @@ -46,7 +50,7 @@ export function flow(name: string, dbPath: string | undefined, opts: FlowOpts = } // biome-ignore lint/suspicious/noExplicitAny: dynamic shape from flowData - const data = flowData(name, dbPath, opts) as any; + const data = flowData(name!, dbPath, opts) as any; if (outputResult(data, 'steps', opts)) return; if (!data.entry) { From 6636300b207ef1233894ff36e525209e9d5c772f Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Tue, 24 Mar 2026 01:09:13 -0600 Subject: [PATCH 08/26] fix(types): use canonical HalsteadDerivedMetrics from types.ts (#581) Replace the private local HalsteadMetrics interface in complexity.ts with the shared 
HalsteadDerivedMetrics exported from types.ts to prevent silent divergence. Addresses Greptile review feedback. Impact: 4 functions changed, 3 affected --- src/features/complexity.ts | 22 +++++----------------- 1 file changed, 5 insertions(+), 17 deletions(-) diff --git a/src/features/complexity.ts b/src/features/complexity.ts index a581be4a..d78a5f76 100644 --- a/src/features/complexity.ts +++ b/src/features/complexity.ts @@ -22,6 +22,7 @@ import type { BetterSqlite3Database, CodegraphConfig, ComplexityRules, + HalsteadDerivedMetrics, HalsteadRules, LOCMetrics, TreeSitterNode, @@ -35,23 +36,10 @@ const COMPLEXITY_EXTENSIONS = buildExtensionSet(COMPLEXITY_RULES); // ─── Halstead Metrics Computation ───────────────────────────────────────── -interface HalsteadMetrics { - n1: number; - n2: number; - bigN1: number; - bigN2: number; - vocabulary: number; - length: number; - volume: number; - difficulty: number; - effort: number; - bugs: number; -} - export function computeHalsteadMetrics( functionNode: TreeSitterNode, language: string, -): HalsteadMetrics | null { +): HalsteadDerivedMetrics | null { const rules = HALSTEAD_RULES.get(language) as HalsteadRules | undefined; if (!rules) return null; @@ -293,7 +281,7 @@ interface AllMetricsResult { cognitive: number; cyclomatic: number; maxNesting: number; - halstead: HalsteadMetrics | null; + halstead: HalsteadDerivedMetrics | null; loc: LOCMetrics; mi: number; } @@ -321,7 +309,7 @@ export function computeAllMetrics( cognitive: number; cyclomatic: number; maxNesting: number; - halstead: HalsteadMetrics | null; + halstead: HalsteadDerivedMetrics | null; }; // The visitor's finish() in function-level mode returns the raw metrics @@ -358,7 +346,7 @@ interface FileSymbols { cyclomatic: number; maxNesting?: number; maintainabilityIndex?: number; - halstead?: HalsteadMetrics | null; + halstead?: HalsteadDerivedMetrics | null; loc?: LOCMetrics | null; }; }>; From 828ea23dd45fd911dce5091784131a3736c900ef Mon Sep 17 00:00:00 2001 
From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Tue, 24 Mar 2026 01:09:26 -0600 Subject: [PATCH 09/26] fix(types): add runtime assertion to nn() helper in cfg-visitor (#581) The nn() helper previously used a bare type assertion to cast TreeSitterNode | null to TreeSitterNode. Now throws a descriptive error if a null node is encountered at runtime. Addresses Greptile review. Impact: 1 functions changed, 20 affected --- src/ast-analysis/visitors/cfg-visitor.ts | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/ast-analysis/visitors/cfg-visitor.ts b/src/ast-analysis/visitors/cfg-visitor.ts index 6d27b8e0..59db422c 100644 --- a/src/ast-analysis/visitors/cfg-visitor.ts +++ b/src/ast-analysis/visitors/cfg-visitor.ts @@ -3,8 +3,11 @@ import type { TreeSitterNode, Visitor, VisitorContext } from '../../types.js'; // biome-ignore lint/suspicious/noExplicitAny: CFG rules are opaque language-specific objects type AnyRules = any; -function nn(node: TreeSitterNode | null): TreeSitterNode { - return node as TreeSitterNode; +function nn(node: TreeSitterNode | null, context?: string): TreeSitterNode { + if (node === null) { + throw new Error(`Unexpected null tree-sitter node${context ? ` (${context})` : ''}`); + } + return node; } interface CfgBlockInternal { From 920201c6c54fa951d584f604f1d8fc8a735be307 Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Tue, 24 Mar 2026 01:09:40 -0600 Subject: [PATCH 10/26] fix(types): coerce isMulti to boolean in batch presentation (#581) Add !! to ensure isMulti is inferred as boolean rather than string | false, enabling proper TypeScript control-flow narrowing. Addresses Greptile review. 
Impact: 1 functions changed, 0 affected --- src/presentation/batch.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/presentation/batch.ts b/src/presentation/batch.ts index d073f5ef..cfa71425 100644 --- a/src/presentation/batch.ts +++ b/src/presentation/batch.ts @@ -37,7 +37,7 @@ export function batchQuery( const isMulti = targets.length > 0 && typeof targets[0] === 'object' && - (targets[0] as MultiBatchTarget).command; + !!(targets[0] as MultiBatchTarget).command; let data: unknown; if (isMulti) { From ef39ef4a4e62149ced73def931d5d2d34f61f3d7 Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Tue, 24 Mar 2026 01:31:07 -0600 Subject: [PATCH 11/26] fix(types): remove stale as-any casts for node.id now on TreeSitterNode (#581) Impact: 4 functions changed, 1 affected --- src/ast-analysis/visitors/ast-store-visitor.ts | 6 ++---- src/ast-analysis/visitors/complexity-visitor.ts | 6 ++---- 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/src/ast-analysis/visitors/ast-store-visitor.ts b/src/ast-analysis/visitors/ast-store-visitor.ts index 9d51d775..08c54a4e 100644 --- a/src/ast-analysis/visitors/ast-store-visitor.ts +++ b/src/ast-analysis/visitors/ast-store-visitor.ts @@ -106,8 +106,7 @@ export function createAstStoreVisitor( name: 'ast-store', enterNode(node: TreeSitterNode, _context: VisitorContext): EnterNodeResult | undefined { - // biome-ignore lint/suspicious/noExplicitAny: tree-sitter node.id exists at runtime but not in our interface - if (matched.has((node as any).id)) return; + if (matched.has(node.id)) return; const kind = astTypeMap[node.type]; if (!kind) return; @@ -145,8 +144,7 @@ export function createAstStoreVisitor( parentNodeId: resolveParentNodeId(line), }); - // biome-ignore lint/suspicious/noExplicitAny: tree-sitter node.id exists at runtime but not in our interface - matched.add((node as any).id); + matched.add(node.id); if (kind !== 'string' && kind !== 'regex') { 
return { skipChildren: true }; diff --git a/src/ast-analysis/visitors/complexity-visitor.ts b/src/ast-analysis/visitors/complexity-visitor.ts index 6dcc9996..de888622 100644 --- a/src/ast-analysis/visitors/complexity-visitor.ts +++ b/src/ast-analysis/visitors/complexity-visitor.ts @@ -68,8 +68,7 @@ function classifyBranchNode( if (cRules.elseViaAlternative) { isElseIf = node.parent?.type === cRules.ifNodeType && - // biome-ignore lint/suspicious/noExplicitAny: tree-sitter node.id exists at runtime but not in our interface - (node.parent!.childForFieldName('alternative') as any)?.id === (node as any).id; + node.parent?.childForFieldName('alternative')?.id === node.id; } else if (cRules.elseNodeType) { isElseIf = node.parent?.type === cRules.elseNodeType; } @@ -99,8 +98,7 @@ function classifyPlainElse( cRules.elseViaAlternative && type !== cRules.ifNodeType && node.parent?.type === cRules.ifNodeType && - // biome-ignore lint/suspicious/noExplicitAny: tree-sitter node.id exists at runtime but not in our interface - (node.parent!.childForFieldName('alternative') as any)?.id === (node as any).id + node.parent?.childForFieldName('alternative')?.id === node.id ) { acc.cognitive++; } From 116f7b4712cb3e03132969464f5f1643c4642fdc Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Tue, 24 Mar 2026 01:53:49 -0600 Subject: [PATCH 12/26] fix: remove customDbPath non-null assertions in audit.ts (#581) Replace customDbPath! with safe fallback in resolveThresholds (line 29) and remove the assertion when passing to explainData (line 163) by widening explainData's parameter to accept string | undefined, matching the downstream openReadonlyOrFail and findDbPath signatures. 
Impact: 3 functions changed, 1 affected --- src/domain/analysis/context.ts | 2 +- src/features/audit.ts | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/domain/analysis/context.ts b/src/domain/analysis/context.ts index 8a5789ef..71bb2d19 100644 --- a/src/domain/analysis/context.ts +++ b/src/domain/analysis/context.ts @@ -503,7 +503,7 @@ export function contextData( export function explainData( target: string, - customDbPath: string, + customDbPath?: string, opts: { noTests?: boolean; depth?: number; diff --git a/src/features/audit.ts b/src/features/audit.ts index cfce21ff..1acdf082 100644 --- a/src/features/audit.ts +++ b/src/features/audit.ts @@ -26,7 +26,7 @@ function resolveThresholds( const cfg = config || (() => { - const dbDir = path.dirname(customDbPath!); + const dbDir = customDbPath ? path.dirname(customDbPath) : process.cwd(); const repoRoot = path.resolve(dbDir, '..'); return loadConfig(repoRoot); })(); @@ -160,7 +160,7 @@ export function auditData( const kind = opts.kind; // 1. Get structure via explainData - const explained = explainData(target, customDbPath!, { noTests, depth: 0 }); + const explained = explainData(target, customDbPath, { noTests, depth: 0 }); // Apply --file and --kind filters for function targets // biome-ignore lint/suspicious/noExplicitAny: explainData returns a union type that varies by kind From a5e2d9d6c64b4fd303e1a3a2ceda4b7d11505875 Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Tue, 24 Mar 2026 01:54:03 -0600 Subject: [PATCH 13/26] fix: replace n.role! with explicit null guard in graph-enrichment (#581) Use an explicit null check so nodes with null roles are intentionally excluded from role filtering rather than relying on a non-null assertion that silences the type checker without documenting the behavior. 
Impact: 1 functions changed, 3 affected --- src/features/graph-enrichment.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/features/graph-enrichment.ts b/src/features/graph-enrichment.ts index 819f4f18..4ae0e88f 100644 --- a/src/features/graph-enrichment.ts +++ b/src/features/graph-enrichment.ts @@ -168,7 +168,7 @@ function prepareFunctionLevelData( if (cfg.filter.roles) { const roles = new Set(cfg.filter.roles); for (const [id, n] of nodeMap) { - if (!roles.has(n.role!)) nodeMap.delete(id); + if (n.role === null || !roles.has(n.role)) nodeMap.delete(id); } const nodeIds = new Set(nodeMap.keys()); edges = edges.filter((e) => nodeIds.has(e.source_id) && nodeIds.has(e.target_id)); From d9feea20496cc02dd3d5b5f18649d185d7e64c3a Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Tue, 24 Mar 2026 02:07:10 -0600 Subject: [PATCH 14/26] fix: address round-3 review feedback on type safety (#581) - cfg.ts: replace tree! non-null assertion with explicit null guard - ast.ts: add defensive guard for JS_TS_AST_TYPES before use - dataflow.ts: document why local DataflowResult differs from canonical type - mcp/tools/index.ts: rename path import to pathTool to avoid Node.js shadow Impact: 2 functions changed, 2 affected --- src/features/ast.ts | 6 +++++- src/features/cfg.ts | 3 ++- src/features/dataflow.ts | 6 ++++++ src/mcp/tools/index.ts | 4 ++-- 4 files changed, 15 insertions(+), 4 deletions(-) diff --git a/src/features/ast.ts b/src/features/ast.ts index b2abf326..fec0a3b0 100644 --- a/src/features/ast.ts +++ b/src/features/ast.ts @@ -155,7 +155,11 @@ function walkAst( rows: AstRow[], nodeIdMap: Map, ): void { - const visitor = createAstStoreVisitor(JS_TS_AST_TYPES!, defs, relPath, nodeIdMap); + if (!JS_TS_AST_TYPES) { + debug('ast-store: JS_TS_AST_TYPES not available — skipping walk'); + return; + } + const visitor = createAstStoreVisitor(JS_TS_AST_TYPES, defs, relPath, nodeIdMap); const results = 
walkWithVisitors(rootNode, [visitor], 'javascript'); const collected = (results['ast-store'] || []) as AstRow[]; rows.push(...collected); diff --git a/src/features/cfg.ts b/src/features/cfg.ts index bf391a15..d439f9d9 100644 --- a/src/features/cfg.ts +++ b/src/features/cfg.ts @@ -166,7 +166,8 @@ function getTreeAndLang( if (!langId) return null; } - return { tree: tree!, langId }; + if (!tree) return null; + return { tree, langId }; } interface VisitorCfgResult { diff --git a/src/features/dataflow.ts b/src/features/dataflow.ts index c1831674..ab5ae728 100644 --- a/src/features/dataflow.ts +++ b/src/features/dataflow.ts @@ -36,6 +36,12 @@ export const DATAFLOW_EXTENSIONS = buildExtensionSet(DATAFLOW_RULES); // ── extractDataflow ────────────────────────────────────────────────────────── +// Note: This local DataflowResult intentionally differs from the canonical +// DataflowResult in types.ts. The canonical type describes the visitor's raw +// output shape, while this module's insertDataflowEdges casts fields to +// richer local interfaces (ArgFlow, Assignment, Mutation) with additional +// properties populated during resolution. Aligning them requires unifying +// the visitor output and DB-insertion shapes — tracked separately. 
interface DataflowResult { parameters: unknown[]; returns: unknown[]; diff --git a/src/mcp/tools/index.ts b/src/mcp/tools/index.ts index d57412a1..0cb0c4a6 100644 --- a/src/mcp/tools/index.ts +++ b/src/mcp/tools/index.ts @@ -37,7 +37,7 @@ import * as listFunctions from './list-functions.js'; import * as listRepos from './list-repos.js'; import * as moduleMap from './module-map.js'; import * as nodeRoles from './node-roles.js'; -import * as path from './path.js'; +import * as pathTool from './path.js'; import * as query from './query.js'; import * as semanticSearch from './semantic-search.js'; import * as sequence from './sequence.js'; @@ -48,7 +48,7 @@ import * as where from './where.js'; export const TOOL_HANDLERS = new Map([ [query.name, query], - [path.name, path], + [pathTool.name, pathTool], [fileDeps.name, fileDeps], [fileExports.name, fileExports], [impactAnalysis.name, impactAnalysis], From 86f3182d15fbeadb9ffe1cc30b6ca131ba10d8bb Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Tue, 24 Mar 2026 02:12:11 -0600 Subject: [PATCH 15/26] fix: preserve native CFG path when tree is undefined in getTreeAndLang (#581) Return { tree: null, langId } instead of null when allNative is true but no WASM tree exists, so the caller can still process pre-computed native CFG data from definitions. 
Impact: 2 functions changed, 1 affected --- src/features/cfg.ts | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/features/cfg.ts b/src/features/cfg.ts index d439f9d9..3ffdadfc 100644 --- a/src/features/cfg.ts +++ b/src/features/cfg.ts @@ -124,7 +124,7 @@ function getTreeAndLang( extToLang: Map, parsers: unknown, getParserFn: unknown, -): { tree: { rootNode: TreeSitterNode }; langId: string } | null { +): { tree: { rootNode: TreeSitterNode } | null; langId: string } | null { const ext = path.extname(relPath).toLowerCase(); let tree = symbols._tree; let langId = symbols._langId; @@ -166,8 +166,7 @@ function getTreeAndLang( if (!langId) return null; } - if (!tree) return null; - return { tree, langId }; + return { tree: tree ?? null, langId }; } interface VisitorCfgResult { @@ -177,7 +176,7 @@ interface VisitorCfgResult { } function buildVisitorCfgMap( - tree: { rootNode: TreeSitterNode } | undefined, + tree: { rootNode: TreeSitterNode } | null | undefined, cfgRules: unknown, symbols: FileSymbols, langId: string, From 96060b44cb60076138ebfe975c4c021359e89969 Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Tue, 24 Mar 2026 02:57:27 -0600 Subject: [PATCH 16/26] fix: forward refinementTheta through louvainCommunities to detectClusters (#581) --- src/features/communities.ts | 1 + src/graph/algorithms/louvain.ts | 2 ++ 2 files changed, 3 insertions(+) diff --git a/src/features/communities.ts b/src/features/communities.ts index f448e176..dbea6bfa 100644 --- a/src/features/communities.ts +++ b/src/features/communities.ts @@ -177,6 +177,7 @@ export function communitiesData( resolution, maxLevels, maxLocalPasses, + refinementTheta, }); const { communities, communityDirs } = buildCommunityObjects(graph, assignments, opts); diff --git a/src/graph/algorithms/louvain.ts b/src/graph/algorithms/louvain.ts index c8643b93..b0776db8 100644 --- a/src/graph/algorithms/louvain.ts +++ 
b/src/graph/algorithms/louvain.ts @@ -14,6 +14,7 @@ export interface LouvainOptions { resolution?: number; maxLevels?: number; maxLocalPasses?: number; + refinementTheta?: number; } export interface LouvainResult { @@ -33,6 +34,7 @@ export function louvainCommunities(graph: CodeGraph, opts: LouvainOptions = {}): directed: false, ...(opts.maxLevels != null && { maxLevels: opts.maxLevels }), ...(opts.maxLocalPasses != null && { maxLocalPasses: opts.maxLocalPasses }), + ...(opts.refinementTheta != null && { refinementTheta: opts.refinementTheta }), }); const assignments = new Map(); From de985f965f26131260646b4afe39e3d5ad2ec17b Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Tue, 24 Mar 2026 09:33:51 -0600 Subject: [PATCH 17/26] fix(types): address remaining review feedback (#581) - Add guard for undefined name in flow.ts instead of non-null assertion - Add biome-ignore comments explaining fileSymbols casts in engine.ts - Tighten CommandDefinition.options tuple type in types.ts Impact: 3 functions changed, 2 affected --- src/ast-analysis/engine.ts | 2 ++ src/presentation/flow.ts | 9 ++++++++- src/types.ts | 8 ++++++-- 3 files changed, 16 insertions(+), 3 deletions(-) diff --git a/src/ast-analysis/engine.ts b/src/ast-analysis/engine.ts index 1cea727e..740df5bc 100644 --- a/src/ast-analysis/engine.ts +++ b/src/ast-analysis/engine.ts @@ -339,6 +339,7 @@ async function delegateToBuildFunctions( const t0 = performance.now(); try { const { buildAstNodes } = await import('../features/ast.js'); + // biome-ignore lint/suspicious/noExplicitAny: ExtractorOutput is a superset of the local FileSymbols expected by buildAstNodes await buildAstNodes(db, fileSymbols as Map, rootDir, engineOpts); } catch (err: unknown) { debug(`buildAstNodes failed: ${(err as Error).message}`); @@ -350,6 +351,7 @@ async function delegateToBuildFunctions( const t0 = performance.now(); try { const { buildComplexityMetrics } = await 
import('../features/complexity.js'); + // biome-ignore lint/suspicious/noExplicitAny: ExtractorOutput is a superset of the local FileSymbols expected by buildComplexityMetrics await buildComplexityMetrics(db, fileSymbols as Map, rootDir, engineOpts); } catch (err: unknown) { debug(`buildComplexityMetrics failed: ${(err as Error).message}`); diff --git a/src/presentation/flow.ts b/src/presentation/flow.ts index 7aec6f9f..fcc34e66 100644 --- a/src/presentation/flow.ts +++ b/src/presentation/flow.ts @@ -49,8 +49,15 @@ export function flow( return; } + if (!name) { + console.log( + 'Please provide a function or entry-point name. Use --list to see available entry points.', + ); + return; + } + // biome-ignore lint/suspicious/noExplicitAny: dynamic shape from flowData - const data = flowData(name!, dbPath, opts) as any; + const data = flowData(name, dbPath, opts) as any; if (outputResult(data, 'steps', opts)) return; if (!data.entry) { diff --git a/src/types.ts b/src/types.ts index d345fb0a..3b91bf9d 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1836,8 +1836,12 @@ export interface CommandDefinition { name: string; description: string; queryOpts?: boolean; - // biome-ignore lint/suspicious/noExplicitAny: option tuples contain mixed types (strings, functions, defaults) - options?: Array<[string, string, ...any[]]>; + options?: Array< + | [string, string] + | [string, string, string | boolean | number] + // biome-ignore lint/suspicious/noExplicitAny: Commander parse functions accept arbitrary return types + | [string, string, (val: string) => any, string | boolean | number] + >; // biome-ignore lint/suspicious/noExplicitAny: Commander options are dynamically typed validate?: (args: any[], opts: any, ctx: CommandContext) => string | undefined | void; // biome-ignore lint/suspicious/noExplicitAny: Commander options are dynamically typed From 40c9886b44c6fc69df50f22e823dec72f22f47fd Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> 
Date: Tue, 24 Mar 2026 13:13:14 -0600 Subject: [PATCH 18/26] fix: delete stale src/index.js superseded by src/index.ts (#581) --- src/index.js | 67 ---------------------------------------------------- 1 file changed, 67 deletions(-) delete mode 100644 src/index.js diff --git a/src/index.js b/src/index.js deleted file mode 100644 index 99bb4aea..00000000 --- a/src/index.js +++ /dev/null @@ -1,67 +0,0 @@ -/** - * codegraph — Programmatic API - * - * Curated public surface: *Data() query functions, graph building, - * export formats, and essential constants. CLI formatters and internal - * utilities are not exported — import them directly if needed. - * - * Usage: - * import { buildGraph, queryNameData, findCycles, exportDOT } from '@optave/codegraph'; - */ - -export { buildGraph } from './domain/graph/builder.js'; -export { findCycles } from './domain/graph/cycles.js'; -export { - briefData, - childrenData, - contextData, - diffImpactData, - explainData, - exportsData, - fileDepsData, - fnDepsData, - fnImpactData, - impactAnalysisData, - moduleMapData, - pathData, - queryNameData, - rolesData, - statsData, - whereData, -} from './domain/queries.js'; -export { - buildEmbeddings, - hybridSearchData, - multiSearchData, - searchData, -} from './domain/search/index.js'; -export { astQueryData } from './features/ast.js'; -export { auditData } from './features/audit.js'; -export { batchData } from './features/batch.js'; -export { branchCompareData } from './features/branch-compare.js'; -export { cfgData } from './features/cfg.js'; -export { checkData } from './features/check.js'; -export { coChangeData } from './features/cochange.js'; -export { communitiesData } from './features/communities.js'; -export { complexityData } from './features/complexity.js'; -export { dataflowData } from './features/dataflow.js'; -export { exportDOT, exportJSON, exportMermaid } from './features/export.js'; -export { flowData, listEntryPointsData } from './features/flow.js'; -export { 
manifestoData } from './features/manifesto.js'; -export { ownersData } from './features/owners.js'; -export { sequenceData } from './features/sequence.js'; -export { hotspotsData, moduleBoundariesData, structureData } from './features/structure.js'; -export { triageData } from './features/triage.js'; -export { loadConfig } from './infrastructure/config.js'; -export { EXTENSIONS, IGNORE_DIRS } from './shared/constants.js'; -export { - AnalysisError, - BoundaryError, - CodegraphError, - ConfigError, - DbError, - EngineError, - ParseError, - ResolutionError, -} from './shared/errors.js'; -export { EVERY_EDGE_KIND, EVERY_SYMBOL_KIND } from './shared/kinds.js'; From c4cd095959e10d74c594268d73db36725f18185f Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Tue, 24 Mar 2026 13:13:25 -0600 Subject: [PATCH 19/26] fix: add langId to null guard in upsertAstComplexity (#581) Impact: 1 functions changed, 3 affected --- src/features/complexity.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/features/complexity.ts b/src/features/complexity.ts index d78a5f76..bb557010 100644 --- a/src/features/complexity.ts +++ b/src/features/complexity.ts @@ -461,12 +461,12 @@ function upsertAstComplexity( langId: string | null | undefined, rules: ComplexityRules | undefined, ): number { - if (!tree || !rules) return 0; + if (!tree || !rules || !langId) return 0; const funcNode = _findFunctionNode(tree.rootNode, def.line, def.endLine ?? 
def.line, rules); if (!funcNode) return 0; - const metrics = computeAllMetrics(funcNode, langId!); + const metrics = computeAllMetrics(funcNode, langId); if (!metrics) return 0; const nodeId = getFunctionNodeId(db, def.name, relPath, def.line); From 51f0a87a8c5833bb6a9c1bb55ac254c9f7ca7ed2 Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Tue, 24 Mar 2026 14:43:10 -0600 Subject: [PATCH 20/26] fix(types): align TriageDataOpts.role with canonical Role type (#581) Changed TriageDataOpts.role from string to Role (branded union from types.ts), removed the & undefined intersection cast that erased the type constraint, and added Role cast at the MCP call site. Impact: 3 functions changed, 5 affected --- src/features/triage.ts | 6 +++--- src/mcp/tools/triage.ts | 3 ++- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/src/features/triage.ts b/src/features/triage.ts index 2eea8ffa..de3fc90e 100644 --- a/src/features/triage.ts +++ b/src/features/triage.ts @@ -5,7 +5,7 @@ import { loadConfig } from '../infrastructure/config.js'; import { warn } from '../infrastructure/logger.js'; import { isTestFile } from '../infrastructure/test-filter.js'; import { paginateResult } from '../shared/paginate.js'; -import type { CodegraphConfig, TriageNodeRow } from '../types.js'; +import type { CodegraphConfig, Role, TriageNodeRow } from '../types.js'; // ─── Scoring ───────────────────────────────────────────────────────── @@ -110,7 +110,7 @@ interface TriageDataOpts { weights?: Partial; file?: string; kind?: string; - role?: string; + role?: Role; limit?: number; offset?: number; repo?: Repository; @@ -147,7 +147,7 @@ export function triageData( noTests, file: opts.file || undefined, kind: opts.kind || undefined, - role: (opts.role || undefined) as TriageDataOpts['role'] & undefined, + role: opts.role || undefined, }); } catch (err: unknown) { warn(`triage query failed: ${(err as Error).message}`); diff --git 
a/src/mcp/tools/triage.ts b/src/mcp/tools/triage.ts index fc73bd2a..7e7ce3e6 100644 --- a/src/mcp/tools/triage.ts +++ b/src/mcp/tools/triage.ts @@ -1,3 +1,4 @@ +import type { Role } from '../../types.js'; import { effectiveLimit, effectiveOffset, MCP_DEFAULTS, MCP_MAX_LIMIT } from '../middleware.js'; import type { McpToolContext } from '../server.js'; @@ -38,7 +39,7 @@ export async function handler(args: TriageArgs, ctx: McpToolContext): Promise Date: Tue, 24 Mar 2026 14:43:22 -0600 Subject: [PATCH 21/26] fix(types): remove unnecessary double cast on config.analysis.sequenceDepth (#581) CodegraphConfig.analysis.sequenceDepth is already typed as number in types.ts, so the double cast to { analysis?: { sequenceDepth?: number } } is unnecessary and misleading. Impact: 1 functions changed, 5 affected --- src/features/sequence.ts | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/features/sequence.ts b/src/features/sequence.ts index 94202c59..04fa575f 100644 --- a/src/features/sequence.ts +++ b/src/features/sequence.ts @@ -278,10 +278,7 @@ export function sequenceData( const { repo, close } = openRepo(dbPath, opts); try { const config = opts.config || loadConfig(); - const maxDepth = - opts.depth || - (config as unknown as { analysis?: { sequenceDepth?: number } }).analysis?.sequenceDepth || - 10; + const maxDepth = opts.depth || config.analysis.sequenceDepth || 10; const noTests = opts.noTests || false; const matchNode = findEntryNode(repo, name, opts); From 35de587ad0e2a39cf56533e806ac5b152fa6c086 Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Tue, 24 Mar 2026 14:43:35 -0600 Subject: [PATCH 22/26] fix(types): use canonical PlotConfig from viewer.ts in graph-enrichment (#581) Replaced local PlotConfig subset interface with the canonical export from presentation/viewer.ts, eliminating type duplication and the double cast on DEFAULT_CONFIG. 
Added optional chaining for fields that are optional in the canonical type. Impact: 4 functions changed, 3 affected --- src/features/graph-enrichment.ts | 33 +++++++++----------------------- 1 file changed, 9 insertions(+), 24 deletions(-) diff --git a/src/features/graph-enrichment.ts b/src/features/graph-enrichment.ts index 4ae0e88f..ce0505e5 100644 --- a/src/features/graph-enrichment.ts +++ b/src/features/graph-enrichment.ts @@ -7,7 +7,7 @@ import { DEFAULT_NODE_COLORS, DEFAULT_ROLE_COLORS, } from '../presentation/colors.js'; -import { DEFAULT_CONFIG, renderPlotHTML } from '../presentation/viewer.js'; +import { DEFAULT_CONFIG, type PlotConfig, renderPlotHTML } from '../presentation/viewer.js'; import type { BetterSqlite3Database } from '../types.js'; // Re-export presentation utilities for backward compatibility @@ -17,21 +17,6 @@ const DEFAULT_MIN_CONFIDENCE = 0.5; // ─── Data Preparation ───────────────────────────────────────────────── -interface PlotConfig { - filter: { - kinds?: string[] | null; - files?: string[] | null; - roles?: string[] | null; - }; - colorBy?: string; - nodeColors: Record; - roleColors: Record; - riskThresholds?: { highBlastRadius?: number; lowMI?: number }; - seedStrategy?: string; - seedCount?: number; - title?: string; -} - interface VisNode { id: number | string; label: string; @@ -75,7 +60,7 @@ export function prepareGraphData( const fileLevel = opts.fileLevel !== false; const noTests = opts.noTests || false; const minConf = opts.minConfidence ?? DEFAULT_MIN_CONFIDENCE; - const cfg = opts.config || (DEFAULT_CONFIG as unknown as PlotConfig); + const cfg = opts.config || DEFAULT_CONFIG; return fileLevel ? 
prepareFileLevelData(db, noTests, minConf, cfg) @@ -125,11 +110,11 @@ function prepareFunctionLevelData( if (noTests) edges = edges.filter((e) => !isTestFile(e.source_file) && !isTestFile(e.target_file)); - if (cfg.filter.kinds) { + if (cfg.filter?.kinds) { const kinds = new Set(cfg.filter.kinds); edges = edges.filter((e) => kinds.has(e.source_kind) && kinds.has(e.target_kind)); } - if (cfg.filter.files) { + if (cfg.filter?.files) { const patterns = cfg.filter.files; edges = edges.filter( (e) => @@ -165,7 +150,7 @@ function prepareFunctionLevelData( } } - if (cfg.filter.roles) { + if (cfg.filter?.roles) { const roles = new Set(cfg.filter.roles); for (const [id, n] of nodeMap) { if (n.role === null || !roles.has(n.role)) nodeMap.delete(id); @@ -253,12 +238,12 @@ function prepareFunctionLevelData( const color: string = cfg.colorBy === 'role' && n.role - ? cfg.roleColors[n.role] || + ? cfg.roleColors?.[n.role] || (DEFAULT_ROLE_COLORS as Record)[n.role] || '#ccc' : cfg.colorBy === 'community' && community !== null ? COMMUNITY_COLORS[community % COMMUNITY_COLORS.length] || '#ccc' - : cfg.nodeColors[n.kind] || + : cfg.nodeColors?.[n.kind] || (DEFAULT_NODE_COLORS as Record)[n.kind] || '#ccc'; @@ -371,7 +356,7 @@ function prepareFileLevelData( const color: string = cfg.colorBy === 'community' && community !== null ? 
COMMUNITY_COLORS[community % COMMUNITY_COLORS.length] || '#ccc' - : cfg.nodeColors['file'] || + : cfg.nodeColors?.['file'] || (DEFAULT_NODE_COLORS as Record)['file'] || '#ccc'; @@ -425,7 +410,7 @@ export function generatePlotHTML( config?: PlotConfig; } = {}, ): string { - const cfg = opts.config || (DEFAULT_CONFIG as unknown as PlotConfig); + const cfg = opts.config || DEFAULT_CONFIG; const data = prepareGraphData(db, opts); return renderPlotHTML(data, cfg); } From 4590c4e5d2a3b92eea9d50f363b76f0485ccc849 Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Tue, 24 Mar 2026 14:43:47 -0600 Subject: [PATCH 23/26] fix(types): replace as-any with narrowed type guard on MCP handler result (#581) The handler result was cast to any just to check for .content. Now uses a proper unknown type with a runtime in-operator guard to narrow the type safely. Impact: 1 functions changed, 1 affected --- src/mcp/server.ts | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/mcp/server.ts b/src/mcp/server.ts index cdfa449a..38a0506b 100644 --- a/src/mcp/server.ts +++ b/src/mcp/server.ts @@ -162,8 +162,10 @@ export async function startMCPServer( allowedRepos, MCP_MAX_LIMIT, }; - const result = (await toolEntry.handler(args, ctx)) as any; - if (result?.content) return result; + const result: unknown = await toolEntry.handler(args, ctx); + if (result && typeof result === 'object' && 'content' in result) { + return result as { content: Array<{ type: string; text: string }> }; + } return { content: [{ type: 'text', text: JSON.stringify(result, null, 2) }] }; } catch (err: unknown) { const code = err instanceof CodegraphError ? 
err.code : 'UNKNOWN_ERROR'; From bde4a4409538d9f0feee639f4f6c78cc3a3c5b5b Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Tue, 24 Mar 2026 15:07:11 -0600 Subject: [PATCH 24/26] fix(types): use ctx.MCP_MAX_LIMIT in semantic-search tool (#581) Replace module-level MCP_MAX_LIMIT import with ctx.MCP_MAX_LIMIT to respect per-server config.mcp.maxResultLimit, consistent with all other MCP tools. Impact: 1 functions changed, 2 affected --- src/mcp/tools/semantic-search.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/mcp/tools/semantic-search.ts b/src/mcp/tools/semantic-search.ts index 57dbde1d..344fcc60 100644 --- a/src/mcp/tools/semantic-search.ts +++ b/src/mcp/tools/semantic-search.ts @@ -1,4 +1,4 @@ -import { effectiveOffset, MCP_DEFAULTS, MCP_MAX_LIMIT } from '../middleware.js'; +import { effectiveOffset, MCP_DEFAULTS } from '../middleware.js'; import type { McpToolContext } from '../server.js'; export const name = 'semantic_search'; @@ -14,7 +14,7 @@ interface SemanticSearchArgs { export async function handler(args: SemanticSearchArgs, ctx: McpToolContext): Promise { const mode = args.mode || 'hybrid'; const searchOpts = { - limit: Math.min(args.limit ?? MCP_DEFAULTS['semantic_search'] ?? 100, MCP_MAX_LIMIT), + limit: Math.min(args.limit ?? MCP_DEFAULTS['semantic_search'] ?? 100, ctx.MCP_MAX_LIMIT), offset: effectiveOffset(args), minScore: args.min_score, }; From bbdaef79f89f9ea41c102302e91826992acc77fe Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Tue, 24 Mar 2026 15:07:23 -0600 Subject: [PATCH 25/26] fix(types): handle null field in outputResult ndjson branch (#581) When field is null, emit the whole data object as a single NDJSON line instead of casting null to string and passing it to printNdjson. 
Impact: 1 functions changed, 69 affected --- src/presentation/result-formatter.ts | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/presentation/result-formatter.ts b/src/presentation/result-formatter.ts index bc5ffcba..1f9580bc 100644 --- a/src/presentation/result-formatter.ts +++ b/src/presentation/result-formatter.ts @@ -121,7 +121,12 @@ export function outputResult( opts: OutputOpts, ): boolean { if (opts.ndjson) { - printNdjson(data, field as string); + if (field === null) { + // No field key — emit the whole object as a single NDJSON line + console.log(JSON.stringify(data)); + } else { + printNdjson(data, field); + } return true; } if (opts.json) { From 4b4a22d924ce0a9db1905627ff0789a4734d7c80 Mon Sep 17 00:00:00 2001 From: carlos-alm <127798846+carlos-alm@users.noreply.github.com> Date: Tue, 24 Mar 2026 15:07:35 -0600 Subject: [PATCH 26/26] fix(types): replace opts as any with explicit property mapping in check (#581) Map CheckCliOpts fields to CheckOpts explicitly instead of casting through any, preserving type safety at the presentation-feature boundary. Impact: 1 functions changed, 1 affected --- src/presentation/check.ts | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/src/presentation/check.ts b/src/presentation/check.ts index 0887d2f6..27520c7c 100644 --- a/src/presentation/check.ts +++ b/src/presentation/check.ts @@ -53,7 +53,16 @@ interface CheckDataResult { } export function check(customDbPath: string | undefined, opts: CheckCliOpts = {}): void { - const data = checkData(customDbPath, opts as any) as CheckDataResult; + const data = checkData(customDbPath, { + ref: opts.ref, + staged: opts.staged, + cycles: opts.cycles !== undefined ? Boolean(opts.cycles) : undefined, + blastRadius: opts.blastRadius, + signatures: opts.signatures, + boundaries: opts.boundaries, + depth: opts.depth, + noTests: opts.noTests, + }) as CheckDataResult; if (data.error) { throw new AnalysisError(data.error);