diff --git a/src/db/index.js b/src/db/index.js index 7d938e1d..1f657a83 100644 --- a/src/db/index.js +++ b/src/db/index.js @@ -56,6 +56,8 @@ export { getFileNodesAll, getFunctionNodeId, getImportEdges, + getLineCountForNode, + getMaxEndLineForFile, getNodeId, hasCfgTables, hasCoChanges, diff --git a/src/db/repository/index.ts b/src/db/repository/index.ts index b96618b8..94a8cfab 100644 --- a/src/db/repository/index.ts +++ b/src/db/repository/index.ts @@ -42,6 +42,8 @@ export { findNodesForTriage, findNodesWithFanIn, getFunctionNodeId, + getLineCountForNode, + getMaxEndLineForFile, getNodeId, iterateFunctionNodes, listFunctionNodes, diff --git a/src/db/repository/nodes.ts b/src/db/repository/nodes.ts index 2fcb3ded..e809b0fa 100644 --- a/src/db/repository/nodes.ts +++ b/src/db/repository/nodes.ts @@ -275,3 +275,37 @@ export function findNodeByQualifiedName( 'SELECT * FROM nodes WHERE qualified_name = ? ORDER BY file, line', ).all(qualifiedName); } + +// ─── Metric helpers ────────────────────────────────────────────────────── + +const _getLineCountForNodeStmt = new WeakMap(); + +/** + * Get line_count from node_metrics for a given node. + * @param {object} db + * @param {number} nodeId + * @returns {{ line_count: number } | undefined} + */ +export function getLineCountForNode(db: BetterSqlite3Database, nodeId: number) { + return cachedStmt( + _getLineCountForNodeStmt, + db, + 'SELECT line_count FROM node_metrics WHERE node_id = ?', + ).get(nodeId); +} + +const _getMaxEndLineForFileStmt = new WeakMap(); + +/** + * Get the maximum end_line across all nodes in a file. 
+ * @param {object} db + * @param {string} file + * @returns {{ max_end: number | null } | undefined} + */ +export function getMaxEndLineForFile(db: BetterSqlite3Database, file: string) { + return cachedStmt( + _getMaxEndLineForFileStmt, + db, + 'SELECT MAX(end_line) as max_end FROM nodes WHERE file = ?', + ).get(file); +} diff --git a/src/domain/analysis/brief.ts b/src/domain/analysis/brief.ts index e88463d2..b02704c7 100644 --- a/src/domain/analysis/brief.ts +++ b/src/domain/analysis/brief.ts @@ -9,6 +9,7 @@ import { } from '../../db/index.js'; import { loadConfig } from '../../infrastructure/config.js'; import { isTestFile } from '../../infrastructure/test-filter.js'; +import type { BetterSqlite3Database, ImportEdgeRow, NodeRow, RelatedNodeRow } from '../../types.js'; /** Symbol kinds meaningful for a file brief — excludes parameters, properties, constants. */ const BRIEF_KINDS = new Set([ @@ -47,15 +48,19 @@ function computeRiskTier( * BFS to count transitive callers for a single node. * Lightweight variant — only counts, does not collect details. */ -// biome-ignore lint/suspicious/noExplicitAny: db handle from better-sqlite3 -function countTransitiveCallers(db: any, startId: number, noTests: boolean, maxDepth = 5): number { +function countTransitiveCallers( + db: BetterSqlite3Database, + startId: number, + noTests: boolean, + maxDepth = 5, +): number { const visited = new Set([startId]); let frontier = [startId]; for (let d = 1; d <= maxDepth; d++) { const nextFrontier: number[] = []; for (const fid of frontier) { - const callers = findDistinctCallers(db, fid); + const callers = findDistinctCallers(db, fid) as RelatedNodeRow[]; for (const c of callers) { if (!visited.has(c.id) && (!noTests || !isTestFile(c.file))) { visited.add(c.id); @@ -75,8 +80,7 @@ function countTransitiveCallers(db: any, startId: number, noTests: boolean, maxD * Depth-bounded to match countTransitiveCallers and keep hook latency predictable. 
*/ function countTransitiveImporters( - // biome-ignore lint/suspicious/noExplicitAny: db handle from better-sqlite3 - db: any, + db: BetterSqlite3Database, fileNodeIds: number[], noTests: boolean, maxDepth = 5, @@ -87,7 +91,7 @@ function countTransitiveImporters( for (let d = 1; d <= maxDepth; d++) { const nextFrontier: number[] = []; for (const current of frontier) { - const dependents = findImportDependents(db, current); + const dependents = findImportDependents(db, current) as RelatedNodeRow[]; for (const dep of dependents) { if (!visited.has(dep.id) && (!noTests || !isTestFile(dep.file))) { visited.add(dep.id); @@ -108,10 +112,10 @@ function countTransitiveImporters( */ export function briefData( file: string, - customDbPath: string | undefined, - // biome-ignore lint/suspicious/noExplicitAny: config shape not yet typed + customDbPath: string, + // biome-ignore lint/suspicious/noExplicitAny: config shape is dynamic opts: { noTests?: boolean; config?: any } = {}, -): object { +) { const db = openReadonlyOrFail(customDbPath); try { const noTests = opts.noTests || false; @@ -120,14 +124,14 @@ export function briefData( const importerDepth = config.analysis?.briefImporterDepth ?? 5; const highRiskCallers = config.analysis?.briefHighRiskCallers ?? 10; const mediumRiskCallers = config.analysis?.briefMediumRiskCallers ?? 
3; - const fileNodes = findFileNodes(db, `%${file}%`); + const fileNodes = findFileNodes(db, `%${file}%`) as NodeRow[]; if (fileNodes.length === 0) { return { file, results: [] }; } const results = fileNodes.map((fn) => { // Direct importers - let importedBy = findImportSources(db, fn.id); + let importedBy = findImportSources(db, fn.id) as ImportEdgeRow[]; if (noTests) importedBy = importedBy.filter((i) => !isTestFile(i.file)); const directImporters = [...new Set(importedBy.map((i) => i.file))]; @@ -135,11 +139,13 @@ export function briefData( const totalImporterCount = countTransitiveImporters(db, [fn.id], noTests, importerDepth); // Direct imports - let importsTo = findImportTargets(db, fn.id); + let importsTo = findImportTargets(db, fn.id) as ImportEdgeRow[]; if (noTests) importsTo = importsTo.filter((i) => !isTestFile(i.file)); // Symbol definitions with roles and caller counts - const defs = findNodesByFile(db, fn.file).filter((d) => BRIEF_KINDS.has(d.kind)); + const defs = (findNodesByFile(db, fn.file) as NodeRow[]).filter((d) => + BRIEF_KINDS.has(d.kind), + ); const symbols = defs.map((d) => { const callerCount = countTransitiveCallers(db, d.id, noTests, callerDepth); return { diff --git a/src/domain/analysis/context.ts b/src/domain/analysis/context.ts index 62c0284c..2c3f5ed2 100644 --- a/src/domain/analysis/context.ts +++ b/src/domain/analysis/context.ts @@ -13,8 +13,11 @@ import { findNodeChildren, findNodesByFile, getComplexityForNode, + getLineCountForNode, + getMaxEndLineForFile, openReadonlyOrFail, } from '../../db/index.js'; +import { cachedStmt } from '../../db/repository/cached-stmt.js'; import { loadConfig } from '../../infrastructure/config.js'; import { debug } from '../../infrastructure/logger.js'; import { isTestFile } from '../../infrastructure/test-filter.js'; @@ -28,20 +31,34 @@ import { import { resolveMethodViaHierarchy } from '../../shared/hierarchy.js'; import { normalizeSymbol } from '../../shared/normalize.js'; import { paginateResult 
} from '../../shared/paginate.js'; +import type { + BetterSqlite3Database, + ChildNodeRow, + ImportEdgeRow, + IntraFileCallEdge, + NodeRow, + RelatedNodeRow, + StmtCache, +} from '../../types.js'; import { findMatchingNodes } from './symbol-lookup.js'; +interface DisplayOpts { + maxLines?: number; + [key: string]: unknown; +} + function buildCallees( - db: any, - node: any, + db: BetterSqlite3Database, + node: NodeRow, repoRoot: string, getFileLines: (file: string) => string[] | null, - opts: { noTests: boolean; depth: number; displayOpts: Record }, -): any[] { + opts: { noTests: boolean; depth: number; displayOpts: DisplayOpts }, +) { const { noTests, depth, displayOpts } = opts; - const calleeRows = findCallees(db, node.id); - const filteredCallees = noTests ? calleeRows.filter((c: any) => !isTestFile(c.file)) : calleeRows; + const calleeRows = findCallees(db, node.id) as RelatedNodeRow[]; + const filteredCallees = noTests ? calleeRows.filter((c) => !isTestFile(c.file)) : calleeRows; - const callees = filteredCallees.map((c: any) => { + const callees = filteredCallees.map((c) => { const cLines = getFileLines(c.file); const summary = cLines ? 
extractSummary(cLines, c.line, displayOpts) : null; let calleeSource: string | null = null; @@ -60,14 +77,14 @@ function buildCallees( }); if (depth > 1) { - const visited = new Set(filteredCallees.map((c: any) => c.id)); + const visited = new Set(filteredCallees.map((c) => c.id)); visited.add(node.id); - let frontier = filteredCallees.map((c: any) => c.id); + let frontier = filteredCallees.map((c) => c.id); const maxDepth = Math.min(depth, 5); for (let d = 2; d <= maxDepth; d++) { const nextFrontier: number[] = []; for (const fid of frontier) { - const deeper = findCallees(db, fid); + const deeper = findCallees(db, fid) as RelatedNodeRow[]; for (const c of deeper) { if (!visited.has(c.id) && (!noTests || !isTestFile(c.file))) { visited.add(c.id); @@ -93,21 +110,24 @@ function buildCallees( return callees; } -function buildCallers(db: any, node: any, noTests: boolean): any[] { - let callerRows = findCallers(db, node.id); +function buildCallers(db: BetterSqlite3Database, node: NodeRow, noTests: boolean) { + let callerRows: Array = findCallers( + db, + node.id, + ) as RelatedNodeRow[]; if (node.kind === 'method' && node.name.includes('.')) { - const methodName = node.name.split('.').pop(); + const methodName = node.name.split('.').pop() ?? 
''; const relatedMethods = resolveMethodViaHierarchy(db, methodName); for (const rm of relatedMethods) { if (rm.id === node.id) continue; - const extraCallers = findCallers(db, rm.id); - callerRows.push(...extraCallers.map((c: any) => ({ ...c, viaHierarchy: rm.name }))); + const extraCallers = findCallers(db, rm.id) as RelatedNodeRow[]; + callerRows.push(...extraCallers.map((c) => ({ ...c, viaHierarchy: rm.name }))); } } - if (noTests) callerRows = callerRows.filter((c: any) => !isTestFile(c.file)); + if (noTests) callerRows = callerRows.filter((c) => !isTestFile(c.file)); - return callerRows.map((c: any) => ({ + return callerRows.map((c) => ({ name: c.name, kind: c.kind, file: c.file, @@ -119,32 +139,22 @@ function buildCallers(db: any, node: any, noTests: boolean): any[] { const INTERFACE_LIKE_KINDS = new Set(['interface', 'trait']); const IMPLEMENTOR_KINDS = new Set(['class', 'struct', 'record', 'enum']); -function buildImplementationInfo(db: any, node: any, noTests: boolean): object { +function buildImplementationInfo(db: BetterSqlite3Database, node: NodeRow, noTests: boolean) { // For interfaces/traits: show who implements them if (INTERFACE_LIKE_KINDS.has(node.kind)) { - let impls = findImplementors(db, node.id); - if (noTests) impls = impls.filter((n: any) => !isTestFile(n.file)); + let impls = findImplementors(db, node.id) as RelatedNodeRow[]; + if (noTests) impls = impls.filter((n) => !isTestFile(n.file)); return { - implementors: impls.map((n: any) => ({ - name: n.name, - kind: n.kind, - file: n.file, - line: n.line, - })), + implementors: impls.map((n) => ({ name: n.name, kind: n.kind, file: n.file, line: n.line })), }; } // For classes/structs: show what they implement if (IMPLEMENTOR_KINDS.has(node.kind)) { - let ifaces = findInterfaces(db, node.id); - if (noTests) ifaces = ifaces.filter((n: any) => !isTestFile(n.file)); + let ifaces = findInterfaces(db, node.id) as RelatedNodeRow[]; + if (noTests) ifaces = ifaces.filter((n) => !isTestFile(n.file)); if 
(ifaces.length > 0) { return { - implements: ifaces.map((n: any) => ({ - name: n.name, - kind: n.kind, - file: n.file, - line: n.line, - })), + implements: ifaces.map((n) => ({ name: n.name, kind: n.kind, file: n.file, line: n.line })), }; } } @@ -152,28 +162,33 @@ function buildImplementationInfo(db: any, node: any, noTests: boolean): object { } function buildRelatedTests( - db: any, - node: any, + db: BetterSqlite3Database, + node: NodeRow, getFileLines: (file: string) => string[] | null, includeTests: boolean, -): any[] { - const testCallerRows = findCallers(db, node.id); - const testCallers = testCallerRows.filter((c: any) => isTestFile(c.file)); +) { + const testCallerRows = findCallers(db, node.id) as RelatedNodeRow[]; + const testCallers = testCallerRows.filter((c) => isTestFile(c.file)); - const testsByFile = new Map(); + const testsByFile = new Map(); for (const tc of testCallers) { if (!testsByFile.has(tc.file)) testsByFile.set(tc.file, []); - testsByFile.get(tc.file)?.push(tc); + testsByFile.get(tc.file)!.push(tc); } - const relatedTests: any[] = []; + const relatedTests: Array<{ + file: string; + testCount: number; + testNames: string[]; + source?: string; + }> = []; for (const [file] of testsByFile) { const tLines = getFileLines(file); const testNames: string[] = []; if (tLines) { for (const tl of tLines) { const tm = tl.match(/(?:it|test|describe)\s*\(\s*['"`]([^'"`]+)['"`]/); - if (tm && tm[1]) testNames.push(tm[1]); + if (tm) testNames.push(tm[1]!); } } const testSource = includeTests && tLines ? 
tLines.join('\n') : undefined; @@ -188,7 +203,7 @@ function buildRelatedTests( return relatedTests; } -function getComplexityMetrics(db: any, nodeId: number): object | null { +function getComplexityMetrics(db: BetterSqlite3Database, nodeId: number) { try { const cRow = getComplexityForNode(db, nodeId); if (!cRow) return null; @@ -199,43 +214,43 @@ function getComplexityMetrics(db: any, nodeId: number): object | null { maintainabilityIndex: cRow.maintainability_index || 0, halsteadVolume: cRow.halstead_volume || 0, }; - } catch (e: any) { - debug(`complexity lookup failed for node ${nodeId}: ${e.message}`); + } catch (e: unknown) { + debug(`complexity lookup failed for node ${nodeId}: ${(e as Error).message}`); return null; } } -function getNodeChildrenSafe(db: any, nodeId: number): any[] { +function getNodeChildrenSafe(db: BetterSqlite3Database, nodeId: number) { try { - return findNodeChildren(db, nodeId).map((c: any) => ({ + return (findNodeChildren(db, nodeId) as ChildNodeRow[]).map((c) => ({ name: c.name, kind: c.kind, line: c.line, endLine: c.end_line || null, })); - } catch (e: any) { - debug(`findNodeChildren failed for node ${nodeId}: ${e.message}`); + } catch (e: unknown) { + debug(`findNodeChildren failed for node ${nodeId}: ${(e as Error).message}`); return []; } } function explainFileImpl( - db: any, + db: BetterSqlite3Database, target: string, getFileLines: (file: string) => string[] | null, - displayOpts: Record, -): any[] { - const fileNodes = findFileNodes(db, `%${target}%`); + displayOpts: DisplayOpts, +) { + const fileNodes = findFileNodes(db, `%${target}%`) as NodeRow[]; if (fileNodes.length === 0) return []; - return fileNodes.map((fn: any) => { - const symbols = findNodesByFile(db, fn.file); + return fileNodes.map((fn) => { + const symbols = findNodesByFile(db, fn.file) as NodeRow[]; // IDs of symbols that have incoming calls from other files (public) - const publicIds = findCrossFileCallTargets(db, fn.file); + const publicIds = 
findCrossFileCallTargets(db, fn.file) as Set; const fileLines = getFileLines(fn.file); - const mapSymbol = (s: any) => ({ + const mapSymbol = (s: NodeRow) => ({ name: s.name, kind: s.kind, line: s.line, @@ -244,31 +259,31 @@ function explainFileImpl( signature: fileLines ? extractSignature(fileLines, s.line, displayOpts) : null, }); - const publicApi = symbols.filter((s: any) => publicIds.has(s.id)).map(mapSymbol); - const internal = symbols.filter((s: any) => !publicIds.has(s.id)).map(mapSymbol); + const publicApi = symbols.filter((s) => publicIds.has(s.id)).map(mapSymbol); + const internal = symbols.filter((s) => !publicIds.has(s.id)).map(mapSymbol); - const imports = findImportTargets(db, fn.id).map((r: any) => ({ file: r.file })); - const importedBy = findImportSources(db, fn.id).map((r: any) => ({ file: r.file })); + const imports = (findImportTargets(db, fn.id) as ImportEdgeRow[]).map((r) => ({ + file: r.file, + })); + const importedBy = (findImportSources(db, fn.id) as ImportEdgeRow[]).map((r) => ({ + file: r.file, + })); - const intraEdges = findIntraFileCallEdges(db, fn.file); + const intraEdges = findIntraFileCallEdges(db, fn.file) as IntraFileCallEdge[]; const dataFlowMap = new Map(); for (const edge of intraEdges) { if (!dataFlowMap.has(edge.caller_name)) dataFlowMap.set(edge.caller_name, []); - dataFlowMap.get(edge.caller_name)?.push(edge.callee_name); + dataFlowMap.get(edge.caller_name)!.push(edge.callee_name); } const dataFlow = [...dataFlowMap.entries()].map(([caller, callees]) => ({ caller, callees, })); - const metric = db - .prepare(`SELECT nm.line_count FROM node_metrics nm WHERE nm.node_id = ?`) - .get(fn.id) as any; - let lineCount = metric?.line_count || null; + const metric = getLineCountForNode(db, fn.id) as { line_count: number } | undefined; + let lineCount: number | null = metric?.line_count || null; if (!lineCount) { - const maxLine = db - .prepare(`SELECT MAX(end_line) as max_end FROM nodes WHERE file = ?`) - .get(fn.file) as any; + 
const maxLine = getMaxEndLineForFile(db, fn.file) as { max_end: number | null } | undefined; lineCount = maxLine?.max_end || null; } @@ -285,48 +300,49 @@ function explainFileImpl( }); } +const _explainNodeStmtCache: StmtCache = new WeakMap(); +const _EXPLAIN_NODE_SQL = `SELECT * FROM nodes WHERE name LIKE ? AND kind IN ('function','method','class','interface','type','struct','enum','trait','record','module','constant') ORDER BY file, line`; + function explainFunctionImpl( - db: any, + db: BetterSqlite3Database, target: string, noTests: boolean, getFileLines: (file: string) => string[] | null, - displayOpts: Record, -): any[] { - let nodes = db - .prepare( - `SELECT * FROM nodes WHERE name LIKE ? AND kind IN ('function','method','class','interface','type','struct','enum','trait','record','module','constant') ORDER BY file, line`, - ) - .all(`%${target}%`) as any[]; - if (noTests) nodes = nodes.filter((n: any) => !isTestFile(n.file)); + displayOpts: DisplayOpts, +) { + const stmt = cachedStmt(_explainNodeStmtCache, db, _EXPLAIN_NODE_SQL); + let nodes = stmt.all(`%${target}%`) as NodeRow[]; + if (noTests) nodes = nodes.filter((n) => !isTestFile(n.file)); if (nodes.length === 0) return []; const hc = new Map(); - return nodes.slice(0, 10).map((node: any) => { + return nodes.slice(0, 10).map((node) => { const fileLines = getFileLines(node.file); const lineCount = node.end_line ? node.end_line - node.line + 1 : null; const summary = fileLines ? extractSummary(fileLines, node.line, displayOpts) : null; const signature = fileLines ? 
extractSignature(fileLines, node.line, displayOpts) : null; - const callees = findCallees(db, node.id).map((c: any) => ({ + const callees = (findCallees(db, node.id) as RelatedNodeRow[]).map((c) => ({ name: c.name, kind: c.kind, file: c.file, line: c.line, })); - let callers = findCallers(db, node.id).map((c: any) => ({ + const allCallerRows = findCallers(db, node.id) as RelatedNodeRow[]; + + let callers = allCallerRows.map((c) => ({ name: c.name, kind: c.kind, file: c.file, line: c.line, })); - if (noTests) callers = callers.filter((c: any) => !isTestFile(c.file)); + if (noTests) callers = callers.filter((c) => !isTestFile(c.file)); - const testCallerRows = findCallers(db, node.id); const seenFiles = new Set(); - const relatedTests = testCallerRows - .filter((r: any) => isTestFile(r.file) && !seenFiles.has(r.file) && seenFiles.add(r.file)) - .map((r: any) => ({ file: r.file })); + const relatedTests = allCallerRows + .filter((r) => isTestFile(r.file) && !seenFiles.has(r.file) && seenFiles.add(r.file)) + .map((r) => ({ file: r.file })); return { ...normalizeSymbol(node, db, hc), @@ -341,18 +357,19 @@ function explainFunctionImpl( }); } +// biome-ignore lint/suspicious/noExplicitAny: explainFunctionImpl results have dynamic shape with _depth function explainCallees( parentResults: any[], currentDepth: number, visited: Set, - db: any, + db: BetterSqlite3Database, noTests: boolean, getFileLines: (file: string) => string[] | null, - displayOpts: Record, + displayOpts: DisplayOpts, ): void { if (currentDepth <= 0) return; for (const r of parentResults) { - const newCallees: any[] = []; + const newCallees: typeof parentResults = []; for (const callee of r.callees) { const key = `${callee.name}:${callee.file}:${callee.line}`; if (visited.has(key)) continue; @@ -364,11 +381,10 @@ function explainCallees( getFileLines, displayOpts, ); - const exact = calleeResults.find( - (cr: any) => cr.file === callee.file && cr.line === callee.line, - ); + const exact = 
calleeResults.find((cr) => cr.file === callee.file && cr.line === callee.line); if (exact) { - exact._depth = (r._depth || 0) + 1; + (exact as Record)['_depth'] = + (((r as Record)['_depth'] as number) || 0) + 1; newCallees.push(exact); } } @@ -379,23 +395,24 @@ function explainCallees( } } -// ─── Exported functions ────────────────────────────────────────────────── +// --- Exported functions --- export function contextData( name: string, - customDbPath: string | undefined, + customDbPath: string, opts: { depth?: number; noSource?: boolean; noTests?: boolean; includeTests?: boolean; - config?: any; file?: string; kind?: string; limit?: number; offset?: number; + // biome-ignore lint/suspicious/noExplicitAny: config shape is dynamic + config?: any; } = {}, -): object { +) { const db = openReadonlyOrFail(customDbPath); try { const depth = opts.depth || 0; @@ -404,7 +421,7 @@ export function contextData( const includeTests = opts.includeTests || false; const config = opts.config || loadConfig(); - const displayOpts = config.display || {}; + const displayOpts: DisplayOpts = config.display || {}; const dbPath = findDbPath(customDbPath); const repoRoot = path.resolve(path.dirname(dbPath), '..'); @@ -416,7 +433,7 @@ export function contextData( const getFileLines = createFileLinesReader(repoRoot); - const results = nodes.map((node: any) => { + const results = nodes.map((node) => { const fileLines = getFileLines(node.file); const source = noSource @@ -463,9 +480,16 @@ export function contextData( export function explainData( target: string, - customDbPath: string | undefined, - opts: { noTests?: boolean; depth?: number; config?: any; limit?: number; offset?: number } = {}, -): object { + customDbPath: string, + opts: { + noTests?: boolean; + depth?: number; + limit?: number; + offset?: number; + // biome-ignore lint/suspicious/noExplicitAny: config shape is dynamic + config?: any; + } = {}, +) { const db = openReadonlyOrFail(customDbPath); try { const noTests = 
opts.noTests || false; @@ -473,7 +497,7 @@ export function explainData( const kind = isFileLikeTarget(target) ? 'file' : 'function'; const config = opts.config || loadConfig(); - const displayOpts = config.display || {}; + const displayOpts: DisplayOpts = config.display || {}; const dbPath = findDbPath(customDbPath); const repoRoot = path.resolve(path.dirname(dbPath), '..'); @@ -486,7 +510,8 @@ export function explainData( : explainFunctionImpl(db, target, noTests, getFileLines, displayOpts); if (kind === 'function' && depth > 0 && results.length > 0) { - const visited = new Set(results.map((r: any) => `${r.name}:${r.file}:${r.line}`)); + // biome-ignore lint/suspicious/noExplicitAny: results are function results when kind === 'function' + const visited = new Set(results.map((r: any) => `${r.name}:${r.file}:${r.line ?? ''}`)); explainCallees(results, depth, visited, db, noTests, getFileLines, displayOpts); } diff --git a/src/domain/analysis/dependencies.ts b/src/domain/analysis/dependencies.ts index c0131f47..def5c965 100644 --- a/src/domain/analysis/dependencies.ts +++ b/src/domain/analysis/dependencies.ts @@ -11,36 +11,34 @@ import { isTestFile } from '../../infrastructure/test-filter.js'; import { resolveMethodViaHierarchy } from '../../shared/hierarchy.js'; import { normalizeSymbol } from '../../shared/normalize.js'; import { paginateResult } from '../../shared/paginate.js'; +import type { BetterSqlite3Database, ImportEdgeRow, NodeRow, RelatedNodeRow } from '../../types.js'; import { findMatchingNodes } from './symbol-lookup.js'; export function fileDepsData( file: string, - customDbPath: string | undefined, + customDbPath: string, opts: { noTests?: boolean; limit?: number; offset?: number } = {}, -): object { +) { const db = openReadonlyOrFail(customDbPath); try { const noTests = opts.noTests || false; - const fileNodes = findFileNodes(db, `%${file}%`); + const fileNodes = findFileNodes(db, `%${file}%`) as NodeRow[]; if (fileNodes.length === 0) { return { 
file, results: [] }; } const results = fileNodes.map((fn) => { - let importsTo = findImportTargets(db, fn.id); + let importsTo = findImportTargets(db, fn.id) as ImportEdgeRow[]; if (noTests) importsTo = importsTo.filter((i) => !isTestFile(i.file)); - let importedBy = findImportSources(db, fn.id); + let importedBy = findImportSources(db, fn.id) as ImportEdgeRow[]; if (noTests) importedBy = importedBy.filter((i) => !isTestFile(i.file)); - const defs = findNodesByFile(db, fn.file); + const defs = findNodesByFile(db, fn.file) as NodeRow[]; return { file: fn.file, - imports: importsTo.map((i) => ({ - file: i.file, - typeOnly: i.edge_kind === 'imports-type', - })), + imports: importsTo.map((i) => ({ file: i.file, typeOnly: i.edge_kind === 'imports-type' })), importedBy: importedBy.map((i) => ({ file: i.file })), definitions: defs.map((d) => ({ name: d.name, kind: d.kind, line: d.line })), }; @@ -55,55 +53,46 @@ export function fileDepsData( /** * BFS transitive caller traversal starting from `callers` of `nodeId`. - * Returns an object keyed by depth (2..depth) → array of caller descriptors. + * Returns an object keyed by depth (2..depth) -> array of caller descriptors. 
*/ function buildTransitiveCallers( - // biome-ignore lint/suspicious/noExplicitAny: db handle from better-sqlite3 - db: any, - // biome-ignore lint/suspicious/noExplicitAny: caller row shape varies - callers: any[], + db: BetterSqlite3Database, + callers: Array<{ id: number; name: string; kind: string; file: string; line: number }>, nodeId: number, depth: number, noTests: boolean, - // biome-ignore lint/suspicious/noExplicitAny: caller row shape varies -): Record { - // biome-ignore lint/suspicious/noExplicitAny: caller row shape varies - const transitiveCallers: Record = {}; +) { + const transitiveCallers: Record< + number, + Array<{ name: string; kind: string; file: string; line: number }> + > = {}; if (depth <= 1) return transitiveCallers; const visited = new Set([nodeId]); - let frontier = callers - .map((c) => { - const row = db - .prepare('SELECT id FROM nodes WHERE name = ? AND kind = ? AND file = ? AND line = ?') - // biome-ignore lint/suspicious/noExplicitAny: DB row type - .get(c.name, c.kind, c.file, c.line) as any; - return row ? { ...c, id: row.id } : null; - }) - // biome-ignore lint/suspicious/noExplicitAny: filtering nulls - .filter(Boolean) as any[]; + let frontier = callers; + + const upstreamStmt = db.prepare(` + SELECT n.id, n.name, n.kind, n.file, n.line + FROM edges e JOIN nodes n ON e.source_id = n.id + WHERE e.target_id = ? AND e.kind = 'calls' + `); for (let d = 2; d <= depth; d++) { - // biome-ignore lint/suspicious/noExplicitAny: caller row shape varies - const nextFrontier: any[] = []; + const nextFrontier: typeof frontier = []; for (const f of frontier) { if (visited.has(f.id)) continue; visited.add(f.id); - const upstream = db - .prepare(` - SELECT n.name, n.kind, n.file, n.line - FROM edges e JOIN nodes n ON e.source_id = n.id - WHERE e.target_id = ? 
AND e.kind = 'calls' - `) - // biome-ignore lint/suspicious/noExplicitAny: DB row types not yet migrated - .all(f.id) as any[]; + const upstream = upstreamStmt.all(f.id) as Array<{ + id: number; + name: string; + kind: string; + file: string; + line: number; + }>; for (const u of upstream) { if (noTests && isTestFile(u.file)) continue; - const uid = db - .prepare('SELECT id FROM nodes WHERE name = ? AND kind = ? AND file = ? AND line = ?') - .get(u.name, u.kind, u.file, u.line)?.id; - if (uid && !visited.has(uid)) { - nextFrontier.push({ ...u, id: uid }); + if (!visited.has(u.id)) { + nextFrontier.push(u); } } } @@ -124,16 +113,16 @@ function buildTransitiveCallers( export function fnDepsData( name: string, - customDbPath: string | undefined, + customDbPath: string, opts: { + depth?: number; noTests?: boolean; file?: string; kind?: string; - depth?: number; limit?: number; offset?: number; } = {}, -): object { +) { const db = openReadonlyOrFail(customDbPath); try { const depth = opts.depth || 3; @@ -146,17 +135,20 @@ export function fnDepsData( } const results = nodes.map((node) => { - const callees = findCallees(db, node.id); + const callees = findCallees(db, node.id) as RelatedNodeRow[]; const filteredCallees = noTests ? 
callees.filter((c) => !isTestFile(c.file)) : callees; - let callers = findCallers(db, node.id); + let callers: Array = findCallers( + db, + node.id, + ) as RelatedNodeRow[]; if (node.kind === 'method' && node.name.includes('.')) { - const methodName = node.name.split('.').pop(); + const methodName = node.name.split('.').pop()!; const relatedMethods = resolveMethodViaHierarchy(db, methodName); for (const rm of relatedMethods) { if (rm.id === node.id) continue; - const extraCallers = findCallers(db, rm.id); + const extraCallers = findCallers(db, rm.id) as RelatedNodeRow[]; callers.push(...extraCallers.map((c) => ({ ...c, viaHierarchy: rm.name }))); } } @@ -177,7 +169,7 @@ export function fnDepsData( kind: c.kind, file: c.file, line: c.line, - viaHierarchy: 'viaHierarchy' in c ? (c.viaHierarchy as string) : undefined, + viaHierarchy: c.viaHierarchy || undefined, })), transitiveCallers, }; @@ -196,22 +188,11 @@ export function fnDepsData( * or { earlyResult } when a caller-facing error/not-found response should be returned immediately. */ function resolveEndpoints( - // biome-ignore lint/suspicious/noExplicitAny: db handle from better-sqlite3 - db: any, + db: BetterSqlite3Database, from: string, to: string, opts: { noTests?: boolean; fromFile?: string; toFile?: string; kind?: string }, -): { - // biome-ignore lint/suspicious/noExplicitAny: node row shape varies - sourceNode?: any; - // biome-ignore lint/suspicious/noExplicitAny: node row shape varies - targetNode?: any; - // biome-ignore lint/suspicious/noExplicitAny: node row shape varies - fromCandidates?: any[]; - // biome-ignore lint/suspicious/noExplicitAny: node row shape varies - toCandidates?: any[]; - earlyResult?: object; -} { +) { const { noTests = false } = opts; const fromNodes = findMatchingNodes(db, from, { @@ -270,26 +251,21 @@ function resolveEndpoints( /** * BFS from sourceId toward targetId. * Returns { found, parent, alternateCount, foundDepth }. - * `parent` maps nodeId → { parentId, edgeKind }. 
+ * `parent` maps nodeId -> { parentId, edgeKind }. */ function bfsShortestPath( - db: any, + db: BetterSqlite3Database, sourceId: number, targetId: number, edgeKinds: string[], reverse: boolean, maxDepth: number, noTests: boolean, -): { - found: boolean; - parent: Map; - alternateCount: number; - foundDepth: number; -} { +) { const kindPlaceholders = edgeKinds.map(() => '?').join(', '); - // Forward: source_id → target_id (A calls... calls B) - // Reverse: target_id → source_id (B is called by... called by A) + // Forward: source_id -> target_id (A calls... calls B) + // Reverse: target_id -> source_id (B is called by... called by A) const neighborQuery = reverse ? `SELECT n.id, n.name, n.kind, n.file, n.line, e.kind AS edge_kind FROM edges e JOIN nodes n ON e.source_id = n.id @@ -309,7 +285,14 @@ function bfsShortestPath( for (let depth = 1; depth <= maxDepth; depth++) { const nextQueue: number[] = []; for (const currentId of queue) { - const neighbors = neighborStmt.all(currentId, ...edgeKinds) as any[]; + const neighbors = neighborStmt.all(currentId, ...edgeKinds) as Array<{ + id: number; + name: string; + kind: string; + file: string; + line: number; + edge_kind: string; + }>; for (const n of neighbors) { if (noTests && isTestFile(n.file)) continue; if (n.id === targetId) { @@ -338,24 +321,30 @@ function bfsShortestPath( /** * Walk the parent map from targetId back to sourceId and return an ordered - * array of node IDs source → target. + * array of node IDs source -> target. 
*/ function reconstructPath( - db: any, + db: BetterSqlite3Database, pathIds: number[], parent: Map, -): any[] { - const nodeCache = new Map(); +) { + const nodeCache = new Map(); + const nodeByIdStmt = db.prepare('SELECT name, kind, file, line FROM nodes WHERE id = ?'); const getNode = (id: number) => { - if (nodeCache.has(id)) return nodeCache.get(id); - const row = db.prepare('SELECT name, kind, file, line FROM nodes WHERE id = ?').get(id); + if (nodeCache.has(id)) return nodeCache.get(id)!; + const row = nodeByIdStmt.get(id) as { + name: string; + kind: string; + file: string; + line: number; + }; nodeCache.set(id, row); return row; }; return pathIds.map((id, idx) => { const node = getNode(id); - const edgeKind = idx === 0 ? null : parent.get(id)?.edgeKind; + const edgeKind = idx === 0 ? null : parent.get(id)!.edgeKind; return { name: node.name, kind: node.kind, file: node.file, line: node.line, edgeKind }; }); } @@ -363,7 +352,7 @@ function reconstructPath( export function pathData( from: string, to: string, - customDbPath: string | undefined, + customDbPath: string, opts: { noTests?: boolean; maxDepth?: number; @@ -373,7 +362,7 @@ export function pathData( toFile?: string; kind?: string; } = {}, -): object { +) { const db = openReadonlyOrFail(customDbPath); try { const noTests = opts.noTests || false; @@ -387,12 +376,12 @@ export function pathData( toFile: opts.toFile, kind: opts.kind, }); - if (resolved.earlyResult) return resolved.earlyResult; + if ('earlyResult' in resolved) return resolved.earlyResult; const { sourceNode, targetNode, fromCandidates, toCandidates } = resolved; // Self-path - if (sourceNode.id === targetNode.id) { + if (sourceNode!.id === targetNode!.id) { return { from, to, @@ -402,10 +391,10 @@ export function pathData( hops: 0, path: [ { - name: sourceNode.name, - kind: sourceNode.kind, - file: sourceNode.file, - line: sourceNode.line, + name: sourceNode!.name, + kind: sourceNode!.kind, + file: sourceNode!.file, + line: sourceNode!.line, 
edgeKind: null, }, ], @@ -421,7 +410,7 @@ export function pathData( parent, alternateCount: rawAlternateCount, foundDepth, - } = bfsShortestPath(db, sourceNode.id, targetNode.id, edgeKinds, reverse, maxDepth, noTests); + } = bfsShortestPath(db, sourceNode!.id, targetNode!.id, edgeKinds, reverse, maxDepth, noTests); if (!found) { return { @@ -443,9 +432,9 @@ export function pathData( const alternateCount = Math.max(0, rawAlternateCount - 1); // Reconstruct path from target back to source - const pathIds = [targetNode.id]; - let cur = targetNode.id; - while (cur !== sourceNode.id) { + const pathIds = [targetNode!.id]; + let cur = targetNode!.id; + while (cur !== sourceNode!.id) { const p = parent.get(cur)!; pathIds.push(p.parentId); cur = p.parentId; diff --git a/src/domain/analysis/exports.ts b/src/domain/analysis/exports.ts index 92e74099..3bf0f959 100644 --- a/src/domain/analysis/exports.ts +++ b/src/domain/analysis/exports.ts @@ -15,13 +15,23 @@ import { extractSummary, } from '../../shared/file-utils.js'; import { paginateResult } from '../../shared/paginate.js'; +import type { BetterSqlite3Database, NodeRow } from '../../types.js'; + +/** Cache the schema probe for the `exported` column per db handle. 
*/ +const _hasExportedColCache: WeakMap = new WeakMap(); export function exportsData( file: string, - customDbPath: string | undefined, - // biome-ignore lint/suspicious/noExplicitAny: config shape not yet typed - opts: { noTests?: boolean; config?: any; unused?: boolean; limit?: number; offset?: number } = {}, -): object { + customDbPath: string, + opts: { + noTests?: boolean; + unused?: boolean; + limit?: number; + offset?: number; + // biome-ignore lint/suspicious/noExplicitAny: config shape is dynamic + config?: any; + } = {}, +) { const db = openReadonlyOrFail(customDbPath); try { const noTests = opts.noTests || false; @@ -56,7 +66,7 @@ export function exportsData( } // For single-file match return flat; for multi-match return first (like explainData) - const first = fileResults[0]; + const first = fileResults[0]!; const base = { file: first.file, results: first.results, @@ -68,7 +78,7 @@ export function exportsData( totalReexported: first.totalReexported, totalReexportedUnused: first.totalReexportedUnused, }; - // biome-ignore lint/suspicious/noExplicitAny: dynamic pagination shape + // biome-ignore lint/suspicious/noExplicitAny: paginateResult returns dynamic shape const paginated: any = paginateResult(base, 'results', { limit: opts.limit, offset: opts.offset, @@ -93,56 +103,67 @@ export function exportsData( } function exportsFileImpl( - // biome-ignore lint/suspicious/noExplicitAny: db handle from better-sqlite3 - db: any, + db: BetterSqlite3Database, target: string, noTests: boolean, getFileLines: (file: string) => string[] | null, unused: boolean, - // biome-ignore lint/suspicious/noExplicitAny: display config shape not yet typed - displayOpts: Record, - // biome-ignore lint/suspicious/noExplicitAny: DB row types not yet migrated -): any[] { - const fileNodes = findFileNodes(db, `%${target}%`); + displayOpts: Record, +) { + const fileNodes = findFileNodes(db, `%${target}%`) as NodeRow[]; if (fileNodes.length === 0) return []; - // Detect whether exported 
column exists - let hasExportedCol = false; - try { - db.prepare('SELECT exported FROM nodes LIMIT 0').raw(); - hasExportedCol = true; - } catch (e: unknown) { - debug(`exported column not available, using fallback: ${e instanceof Error ? e.message : e}`); + // Detect whether exported column exists (cached per db handle) + let hasExportedCol: boolean; + if (_hasExportedColCache.has(db)) { + hasExportedCol = _hasExportedColCache.get(db)!; + } else { + hasExportedCol = false; + try { + db.prepare('SELECT exported FROM nodes LIMIT 0').raw(true); + hasExportedCol = true; + } catch (e: unknown) { + debug(`exported column not available, using fallback: ${(e as Error).message}`); + } + _hasExportedColCache.set(db, hasExportedCol); } + const exportedNodesStmt = db.prepare( + "SELECT * FROM nodes WHERE file = ? AND kind != 'file' AND exported = 1 ORDER BY line", + ); + const consumersStmt = db.prepare( + `SELECT n.name, n.file, n.line FROM edges e JOIN nodes n ON e.source_id = n.id + WHERE e.target_id = ? AND e.kind = 'calls'`, + ); + const reexportsFromStmt = db.prepare( + `SELECT DISTINCT n.file FROM edges e JOIN nodes n ON e.source_id = n.id + WHERE e.target_id = ? AND e.kind = 'reexports'`, + ); + const reexportsToStmt = db.prepare( + `SELECT DISTINCT n.file FROM edges e JOIN nodes n ON e.target_id = n.id + WHERE e.source_id = ? AND e.kind = 'reexports'`, + ); + return fileNodes.map((fn) => { - const symbols = findNodesByFile(db, fn.file); + const symbols = findNodesByFile(db, fn.file) as NodeRow[]; - // biome-ignore lint/suspicious/noExplicitAny: DB row types not yet migrated - let exported: any[]; + let exported: NodeRow[]; if (hasExportedCol) { // Use the exported column populated during build - exported = db - .prepare( - "SELECT * FROM nodes WHERE file = ? 
AND kind != 'file' AND exported = 1 ORDER BY line", - ) - .all(fn.file); + exported = exportedNodesStmt.all(fn.file) as NodeRow[]; } else { // Fallback: symbols that have incoming calls from other files - const exportedIds = findCrossFileCallTargets(db, fn.file); + const exportedIds = findCrossFileCallTargets(db, fn.file) as Set; exported = symbols.filter((s) => exportedIds.has(s.id)); } const internalCount = symbols.length - exported.length; - // biome-ignore lint/suspicious/noExplicitAny: DB row types not yet migrated - const buildSymbolResult = (s: any, fileLines: string[] | null) => { - let consumers = db - .prepare( - `SELECT n.name, n.file, n.line FROM edges e JOIN nodes n ON e.source_id = n.id - WHERE e.target_id = ? AND e.kind = 'calls'`, - ) - // biome-ignore lint/suspicious/noExplicitAny: DB row types not yet migrated - .all(s.id) as any[]; + const buildSymbolResult = (s: NodeRow, fileLines: string[] | null) => { + let consumers = consumersStmt.all(s.id) as Array<{ + name: string; + file: string; + line: number; + }>; if (noTests) consumers = consumers.filter((c) => !isTestFile(c.file)); return { @@ -162,47 +183,31 @@ function exportsFileImpl( const totalUnused = results.filter((r) => r.consumerCount === 0).length; - // Files that re-export this file (barrel → this file) - const reexports = ( - db - .prepare( - `SELECT DISTINCT n.file FROM edges e JOIN nodes n ON e.source_id = n.id - WHERE e.target_id = ? AND e.kind = 'reexports'`, - ) - // biome-ignore lint/suspicious/noExplicitAny: DB row types not yet migrated - .all(fn.id) as any[] - ).map((r) => ({ file: r.file })); + // Files that re-export this file (barrel -> this file) + const reexports = (reexportsFromStmt.all(fn.id) as Array<{ file: string }>).map((r) => ({ + file: r.file, + })); // For barrel files: gather symbols re-exported from target modules - const reexportTargets = db - .prepare( - `SELECT DISTINCT n.file FROM edges e JOIN nodes n ON e.target_id = n.id - WHERE e.source_id = ? 
AND e.kind = 'reexports'`, - ) - .all(fn.id); - - // biome-ignore lint/suspicious/noExplicitAny: DB row types not yet migrated - const reexportedSymbols: any[] = []; - for (const target of reexportTargets) { - // biome-ignore lint/suspicious/noExplicitAny: DB row types not yet migrated - let targetExported: any[]; + const reexportTargets = reexportsToStmt.all(fn.id) as Array<{ file: string }>; + + const reexportedSymbols: Array & { originFile: string }> = + []; + for (const reexTarget of reexportTargets) { + let targetExported: NodeRow[]; if (hasExportedCol) { - targetExported = db - .prepare( - "SELECT * FROM nodes WHERE file = ? AND kind != 'file' AND exported = 1 ORDER BY line", - ) - .all(target.file); + targetExported = exportedNodesStmt.all(reexTarget.file) as NodeRow[]; } else { // Fallback: same heuristic as direct exports — symbols called from other files - const targetSymbols = findNodesByFile(db, target.file); - const exportedIds = findCrossFileCallTargets(db, target.file); + const targetSymbols = findNodesByFile(db, reexTarget.file) as NodeRow[]; + const exportedIds = findCrossFileCallTargets(db, reexTarget.file) as Set; targetExported = targetSymbols.filter((s) => exportedIds.has(s.id)); } for (const s of targetExported) { - const fileLines = getFileLines(target.file); + const fileLines = getFileLines(reexTarget.file); reexportedSymbols.push({ ...buildSymbolResult(s, fileLines), - originFile: target.file, + originFile: reexTarget.file, }); } } diff --git a/src/domain/analysis/impact.ts b/src/domain/analysis/impact.ts index 8b822409..2e805ffc 100644 --- a/src/domain/analysis/impact.ts +++ b/src/domain/analysis/impact.ts @@ -18,9 +18,10 @@ import { debug } from '../../infrastructure/logger.js'; import { isTestFile } from '../../infrastructure/test-filter.js'; import { normalizeSymbol } from '../../shared/normalize.js'; import { paginateResult } from '../../shared/paginate.js'; +import type { BetterSqlite3Database, NodeRow, RelatedNodeRow } from 
'../../types.js'; import { findMatchingNodes } from './symbol-lookup.js'; -// ─── Shared BFS: transitive callers ──────────────────────────────────── +// --- Shared BFS: transitive callers --- const INTERFACE_LIKE_KINDS = new Set(['interface', 'trait']); @@ -28,8 +29,8 @@ const INTERFACE_LIKE_KINDS = new Set(['interface', 'trait']); * Check whether the graph contains any 'implements' edges. * Cached per db handle so the query runs at most once per connection. */ -const _hasImplementsCache = new WeakMap(); -function hasImplementsEdges(db: any): boolean { +const _hasImplementsCache: WeakMap = new WeakMap(); +function hasImplementsEdges(db: BetterSqlite3Database): boolean { if (_hasImplementsCache.has(db)) return _hasImplementsCache.get(db)!; const row = db.prepare("SELECT 1 FROM edges WHERE kind = 'implements' LIMIT 1").get(); const result = !!row; @@ -44,7 +45,7 @@ function hasImplementsEdges(db: any): boolean { * so that changes to an interface signature propagate to all implementors. */ export function bfsTransitiveCallers( - db: any, + db: BetterSqlite3Database, startId: number, { noTests = false, @@ -55,23 +56,30 @@ export function bfsTransitiveCallers( noTests?: boolean; maxDepth?: number; includeImplementors?: boolean; - onVisit?: (caller: any, parentId: number, depth: number) => void; + onVisit?: ( + caller: RelatedNodeRow & { viaImplements?: boolean }, + parentId: number, + depth: number, + ) => void; } = {}, -): { totalDependents: number; levels: Record } { +) { // Skip all implementor lookups when the graph has no implements edges const resolveImplementors = includeImplementors && hasImplementsEdges(db); const visited = new Set([startId]); - const levels: Record = {}; + const levels: Record< + number, + Array<{ name: string; kind: string; file: string; line: number; viaImplements?: boolean }> + > = {}; let frontier = [startId]; // Seed: if start node is an interface/trait, include its implementors at depth 1. 
// Implementors go into a separate list so their callers appear at depth 2, not depth 1. const implNextFrontier: number[] = []; if (resolveImplementors) { - const startNode = findNodeById(db, startId); + const startNode = findNodeById(db, startId) as NodeRow | undefined; if (startNode && INTERFACE_LIKE_KINDS.has(startNode.kind)) { - const impls = findImplementors(db, startId); + const impls = findImplementors(db, startId) as RelatedNodeRow[]; for (const impl of impls) { if (!visited.has(impl.id) && (!noTests || !isTestFile(impl.file))) { visited.add(impl.id); @@ -97,12 +105,12 @@ export function bfsTransitiveCallers( } const nextFrontier: number[] = []; for (const fid of frontier) { - const callers = findDistinctCallers(db, fid); + const callers = findDistinctCallers(db, fid) as RelatedNodeRow[]; for (const c of callers) { if (!visited.has(c.id) && (!noTests || !isTestFile(c.file))) { visited.add(c.id); nextFrontier.push(c.id); - levels[d] = levels[d] || []; + if (!levels[d]) levels[d] = []; levels[d]!.push({ name: c.name, kind: c.kind, file: c.file, line: c.line }); if (onVisit) onVisit(c, fid, d); } @@ -110,7 +118,7 @@ export function bfsTransitiveCallers( // If a caller is an interface/trait, also pull in its implementors // Implementors are one extra hop away, so record at d+1 if (resolveImplementors && INTERFACE_LIKE_KINDS.has(c.kind)) { - const impls = findImplementors(db, c.id); + const impls = findImplementors(db, c.id) as RelatedNodeRow[]; for (const impl of impls) { if (!visited.has(impl.id) && (!noTests || !isTestFile(impl.file))) { visited.add(impl.id); @@ -139,19 +147,13 @@ export function bfsTransitiveCallers( export function impactAnalysisData( file: string, - customDbPath: string | undefined, - opts: { - noTests?: boolean; - maxDepth?: number; - config?: any; - limit?: number; - offset?: number; - } = {}, -): object { + customDbPath: string, + opts: { noTests?: boolean } = {}, +) { const db = openReadonlyOrFail(customDbPath); try { const noTests = 
opts.noTests || false; - const fileNodes = findFileNodes(db, `%${file}%`); + const fileNodes = findFileNodes(db, `%${file}%`) as NodeRow[]; if (fileNodes.length === 0) { return { file, sources: [], levels: {}, totalDependents: 0 }; } @@ -169,7 +171,7 @@ export function impactAnalysisData( while (queue.length > 0) { const current = queue.shift()!; const level = levels.get(current)!; - const dependents = findImportDependents(db, current); + const dependents = findImportDependents(db, current) as RelatedNodeRow[]; for (const dep of dependents) { if (!visited.has(dep.id) && (!noTests || !isTestFile(dep.file))) { visited.add(dep.id); @@ -179,17 +181,17 @@ export function impactAnalysisData( } } - const byLevel: Record = {}; + const byLevel: Record> = {}; for (const [id, level] of levels) { if (level === 0) continue; if (!byLevel[level]) byLevel[level] = []; - const node = findNodeById(db, id); + const node = findNodeById(db, id) as NodeRow | undefined; if (node) byLevel[level].push({ file: node.file }); } return { file, - sources: fileNodes.map((f: any) => f.file), + sources: fileNodes.map((f) => f.file), levels: byLevel, totalDependents: visited.size - fileNodes.length, }; @@ -200,18 +202,19 @@ export function impactAnalysisData( export function fnImpactData( name: string, - customDbPath: string | undefined, + customDbPath: string, opts: { - noTests?: boolean; depth?: number; - config?: any; + noTests?: boolean; file?: string; kind?: string; includeImplementors?: boolean; limit?: number; offset?: number; + // biome-ignore lint/suspicious/noExplicitAny: config shape is dynamic + config?: any; } = {}, -): object { +) { const db = openReadonlyOrFail(customDbPath); try { const config = opts.config || loadConfig(); @@ -226,7 +229,7 @@ export function fnImpactData( const includeImplementors = opts.includeImplementors !== false; - const results = nodes.map((node: any) => { + const results = nodes.map((node) => { const { levels, totalDependents } = bfsTransitiveCallers(db, 
node.id, { noTests, maxDepth, @@ -246,7 +249,7 @@ export function fnImpactData( } } -// ─── diffImpactData helpers ───────────────────────────────────────────── +// --- diffImpactData helpers --- /** * Walk up from repoRoot until a .git directory is found. @@ -272,7 +275,7 @@ function findGitRoot(repoRoot: string): boolean { function runGitDiff( repoRoot: string, opts: { staged?: boolean; ref?: string }, -): { output?: string; error?: string } { +): { output: string; error?: never } | { error: string; output?: never } { try { const args = opts.staged ? ['diff', '--cached', '--unified=0', '--no-color'] @@ -284,18 +287,15 @@ function runGitDiff( stdio: ['pipe', 'pipe', 'pipe'], }); return { output }; - } catch (e: any) { - return { error: `Failed to run git diff: ${e.message}` }; + } catch (e: unknown) { + return { error: `Failed to run git diff: ${(e as Error).message}` }; } } /** * Parse raw git diff output into a changedRanges map and newFiles set. */ -function parseGitDiff(diffOutput: string): { - changedRanges: Map>; - newFiles: Set; -} { +function parseGitDiff(diffOutput: string) { const changedRanges = new Map>(); const newFiles = new Set(); let currentFile: string | null = null; @@ -312,9 +312,9 @@ function parseGitDiff(diffOutput: string): { } const fileMatch = line.match(/^\+\+\+ b\/(.+)/); if (fileMatch) { - currentFile = fileMatch[1] ?? 
null; - if (currentFile && !changedRanges.has(currentFile)) changedRanges.set(currentFile, []); - if (currentFile && prevIsDevNull) newFiles.add(currentFile); + currentFile = fileMatch[1]!; + if (!changedRanges.has(currentFile)) changedRanges.set(currentFile, []); + if (prevIsDevNull) newFiles.add(currentFile!); prevIsDevNull = false; continue; } @@ -322,7 +322,7 @@ function parseGitDiff(diffOutput: string): { if (hunkMatch && currentFile) { const start = parseInt(hunkMatch[1]!, 10); const count = parseInt(hunkMatch[2] || '1', 10); - changedRanges.get(currentFile)?.push({ start, end: start + count - 1 }); + changedRanges.get(currentFile)!.push({ start, end: start + count - 1 }); } } @@ -333,21 +333,20 @@ function parseGitDiff(diffOutput: string): { * Find all function/method/class nodes whose line ranges overlap any changed range. */ function findAffectedFunctions( - db: any, + db: BetterSqlite3Database, changedRanges: Map>, noTests: boolean, -): any[] { - const affectedFunctions: any[] = []; +): NodeRow[] { + const affectedFunctions: NodeRow[] = []; + const defsStmt = db.prepare( + `SELECT * FROM nodes WHERE file = ? AND kind IN ('function', 'method', 'class') ORDER BY line`, + ); for (const [file, ranges] of changedRanges) { if (noTests && isTestFile(file)) continue; - const defs = db - .prepare( - `SELECT * FROM nodes WHERE file = ? AND kind IN ('function', 'method', 'class') ORDER BY line`, - ) - .all(file) as any[]; + const defs = defsStmt.all(file) as NodeRow[]; for (let i = 0; i < defs.length; i++) { - const def = defs[i]; - const endLine = def.end_line || (defs[i + 1] ? defs[i + 1].line - 1 : 999999); + const def = defs[i]!; + const endLine = def.end_line || (defs[i + 1] ? 
defs[i + 1]!.line - 1 : 999999); for (const range of ranges) { if (range.start <= endLine && range.end >= def.line) { affectedFunctions.push(def); @@ -363,15 +362,15 @@ function findAffectedFunctions( * Run BFS per affected function, collecting per-function results and the full affected set. */ function buildFunctionImpactResults( - db: any, - affectedFunctions: any[], + db: BetterSqlite3Database, + affectedFunctions: NodeRow[], noTests: boolean, maxDepth: number, includeImplementors = true, -): { functionResults: any[]; allAffected: Set } { +) { const allAffected = new Set(); - const functionResults = affectedFunctions.map((fn: any) => { - const edges: any[] = []; + const functionResults = affectedFunctions.map((fn) => { + const edges: Array<{ from: string; to: string }> = []; const idToKey = new Map(); idToKey.set(fn.id, `${fn.file}::${fn.name}:${fn.line}`); @@ -379,11 +378,11 @@ function buildFunctionImpactResults( noTests, maxDepth, includeImplementors, - onVisit(c: any, parentId: number) { + onVisit(c, parentId) { allAffected.add(`${c.file}:${c.name}`); const callerKey = `${c.file}::${c.name}:${c.line}`; idToKey.set(c.id, callerKey); - edges.push({ from: idToKey.get(parentId), to: callerKey }); + edges.push({ from: idToKey.get(parentId)!, to: callerKey }); }, }); @@ -406,11 +405,11 @@ function buildFunctionImpactResults( * Returns an empty array if the co_changes table is unavailable. 
*/ function lookupCoChanges( - db: any, - changedRanges: Map, + db: BetterSqlite3Database, + changedRanges: Map, affectedFiles: Set, noTests: boolean, -): any[] { +) { try { db.prepare('SELECT 1 FROM co_changes LIMIT 1').get(); const changedFilesList = [...changedRanges.keys()]; @@ -419,9 +418,9 @@ function lookupCoChanges( limit: 20, noTests, }); - return coResults.filter((r: any) => !affectedFiles.has(r.file)); - } catch (e: any) { - debug(`co_changes lookup skipped: ${e.message}`); + return coResults.filter((r: { file: string }) => !affectedFiles.has(r.file)); + } catch (e: unknown) { + debug(`co_changes lookup skipped: ${(e as Error).message}`); return []; } } @@ -431,10 +430,10 @@ function lookupCoChanges( * Returns null if no owners are found or lookup fails. */ function lookupOwnership( - changedRanges: Map, + changedRanges: Map, affectedFiles: Set, repoRoot: string, -): { owners: object; affectedOwners: string[]; suggestedReviewers: string[] } | null { +) { try { const allFilePaths = [...new Set([...changedRanges.keys(), ...affectedFiles])]; const ownerResult = ownersForFiles(allFilePaths, repoRoot); @@ -446,8 +445,8 @@ function lookupOwnership( }; } return null; - } catch (e: any) { - debug(`CODEOWNERS lookup skipped: ${e.message}`); + } catch (e: unknown) { + debug(`CODEOWNERS lookup skipped: ${(e as Error).message}`); return null; } } @@ -457,12 +456,13 @@ function lookupOwnership( * Returns `{ boundaryViolations, boundaryViolationCount }`. 
*/ function checkBoundaryViolations( - db: any, - changedRanges: Map, + db: BetterSqlite3Database, + changedRanges: Map, noTests: boolean, + // biome-ignore lint/suspicious/noExplicitAny: opts shape varies by caller opts: any, repoRoot: string, -): { boundaryViolations: any[]; boundaryViolationCount: number } { +) { try { const cfg = opts.config || loadConfig(repoRoot); const boundaryConfig = cfg.manifesto?.boundaries; @@ -476,31 +476,32 @@ function checkBoundaryViolations( boundaryViolationCount: result.violationCount, }; } - } catch (e: any) { - debug(`boundary check skipped: ${e.message}`); + } catch (e: unknown) { + debug(`boundary check skipped: ${(e as Error).message}`); } return { boundaryViolations: [], boundaryViolationCount: 0 }; } -// ─── diffImpactData ───────────────────────────────────────────────────── +// --- diffImpactData --- /** * Fix #2: Shell injection vulnerability. * Uses execFileSync instead of execSync to prevent shell interpretation of user input. */ export function diffImpactData( - customDbPath: string | undefined, + customDbPath: string, opts: { noTests?: boolean; - config?: any; depth?: number; staged?: boolean; ref?: string; includeImplementors?: boolean; limit?: number; offset?: number; + // biome-ignore lint/suspicious/noExplicitAny: config shape is dynamic + config?: any; } = {}, -): object { +) { const db = openReadonlyOrFail(customDbPath); try { const noTests = opts.noTests || false; @@ -515,9 +516,9 @@ export function diffImpactData( } const gitResult = runGitDiff(repoRoot, opts); - if (gitResult.error) return { error: gitResult.error }; + if ('error' in gitResult) return { error: gitResult.error }; - if (!gitResult.output?.trim()) { + if (!gitResult.output.trim()) { return { changedFiles: 0, newFiles: [], @@ -527,7 +528,7 @@ export function diffImpactData( }; } - const { changedRanges, newFiles } = parseGitDiff(gitResult.output!); + const { changedRanges, newFiles } = parseGitDiff(gitResult.output); if (changedRanges.size === 
0) { return { @@ -587,26 +588,28 @@ export function diffImpactData( } export function diffImpactMermaid( - customDbPath: string | undefined, + customDbPath: string, opts: { noTests?: boolean; - config?: any; depth?: number; staged?: boolean; ref?: string; includeImplementors?: boolean; limit?: number; offset?: number; + // biome-ignore lint/suspicious/noExplicitAny: config shape is dynamic + config?: any; } = {}, -): string | object { +): string { + // biome-ignore lint/suspicious/noExplicitAny: paginateResult returns dynamic shape const data: any = diffImpactData(customDbPath, opts); - if (data.error) return data.error; + if ('error' in data) return data.error as string; if (data.changedFiles === 0 || data.affectedFunctions.length === 0) { return 'flowchart TB\n none["No impacted functions detected"]'; } const newFileSet = new Set(data.newFiles || []); - const lines: string[] = ['flowchart TB']; + const lines = ['flowchart TB']; // Assign stable Mermaid node IDs let nodeCounter = 0; @@ -623,8 +626,8 @@ export function diffImpactMermaid( // Register all nodes (changed functions + their callers) for (const fn of data.affectedFunctions) { nodeId(`${fn.file}::${fn.name}:${fn.line}`, fn.name); - for (const callers of Object.values(fn.levels || {}) as any[][]) { - for (const c of callers) { + for (const callers of Object.values(fn.levels || {})) { + for (const c of callers as Array<{ name: string; file: string; line: number }>) { nodeId(`${c.file}::${c.name}:${c.line}`, c.name); } } @@ -665,10 +668,10 @@ export function diffImpactMermaid( } // Group changed functions by file - const fileGroups = new Map(); + const fileGroups = new Map(); for (const fn of data.affectedFunctions) { if (!fileGroups.has(fn.file)) fileGroups.set(fn.file, []); - fileGroups.get(fn.file)?.push(fn); + fileGroups.get(fn.file)!.push(fn); } // Emit changed-file subgraphs diff --git a/src/domain/analysis/implementations.ts b/src/domain/analysis/implementations.ts index 7281e6ac..7d49a9c5 100644 --- 
a/src/domain/analysis/implementations.ts +++ b/src/domain/analysis/implementations.ts @@ -3,21 +3,17 @@ import { isTestFile } from '../../infrastructure/test-filter.js'; import { CORE_SYMBOL_KINDS } from '../../shared/kinds.js'; import { normalizeSymbol } from '../../shared/normalize.js'; import { paginateResult } from '../../shared/paginate.js'; +import type { RelatedNodeRow } from '../../types.js'; import { findMatchingNodes } from './symbol-lookup.js'; /** * Find all concrete types implementing a given interface/trait. - * - * @param {string} name - Interface/trait name (partial match) - * @param {string|undefined} customDbPath - * @param {{ noTests?: boolean, file?: string, kind?: string, limit?: number, offset?: number }} opts - * @returns {{ name: string, results: Array<{ name: string, kind: string, file: string, line: number, implementors: Array<{ name: string, kind: string, file: string, line: number }> }> }} */ export function implementationsData( name: string, - customDbPath: string | undefined, + customDbPath: string, opts: { noTests?: boolean; file?: string; kind?: string; limit?: number; offset?: number } = {}, -): object { +) { const db = openReadonlyOrFail(customDbPath); try { const noTests = opts.noTests || false; @@ -27,15 +23,14 @@ export function implementationsData( noTests, file: opts.file, kind: opts.kind, - kinds: opts.kind ? undefined : [...CORE_SYMBOL_KINDS], + kinds: opts.kind ? 
undefined : CORE_SYMBOL_KINDS, }); if (nodes.length === 0) { return { name, results: [] }; } - // biome-ignore lint/suspicious/noExplicitAny: DB row types not yet migrated - const results = (nodes as any[]).map((node) => { - let implementors = findImplementors(db, node.id); + const results = nodes.map((node) => { + let implementors = findImplementors(db, node.id) as RelatedNodeRow[]; if (noTests) implementors = implementors.filter((n) => !isTestFile(n.file)); return { @@ -58,17 +53,12 @@ export function implementationsData( /** * Find all interfaces/traits that a given class/struct implements. - * - * @param {string} name - Class/struct name (partial match) - * @param {string|undefined} customDbPath - * @param {{ noTests?: boolean, file?: string, kind?: string, limit?: number, offset?: number }} opts - * @returns Object with name and results array containing interface info */ export function interfacesData( name: string, - customDbPath: string | undefined, + customDbPath: string, opts: { noTests?: boolean; file?: string; kind?: string; limit?: number; offset?: number } = {}, -): object { +) { const db = openReadonlyOrFail(customDbPath); try { const noTests = opts.noTests || false; @@ -78,15 +68,14 @@ export function interfacesData( noTests, file: opts.file, kind: opts.kind, - kinds: opts.kind ? undefined : [...CORE_SYMBOL_KINDS], + kinds: opts.kind ? 
undefined : CORE_SYMBOL_KINDS, }); if (nodes.length === 0) { return { name, results: [] }; } - // biome-ignore lint/suspicious/noExplicitAny: DB row types not yet migrated - const results = (nodes as any[]).map((node) => { - let interfaces = findInterfaces(db, node.id); + const results = nodes.map((node) => { + let interfaces = findInterfaces(db, node.id) as RelatedNodeRow[]; if (noTests) interfaces = interfaces.filter((n) => !isTestFile(n.file)); return { diff --git a/src/domain/analysis/module-map.ts b/src/domain/analysis/module-map.ts index e2d3c79b..18fccef6 100644 --- a/src/domain/analysis/module-map.ts +++ b/src/domain/analysis/module-map.ts @@ -1,4 +1,5 @@ import path from 'node:path'; +import type BetterSqlite3 from 'better-sqlite3'; import { openReadonlyOrFail, testFilterSQL } from '../../db/index.js'; import { loadConfig } from '../../infrastructure/config.js'; import { debug } from '../../infrastructure/logger.js'; @@ -43,10 +44,11 @@ export const FALSE_POSITIVE_CALLER_THRESHOLD = 20; // Section helpers // --------------------------------------------------------------------------- -// biome-ignore lint/suspicious/noExplicitAny: db handle from better-sqlite3 -function buildTestFileIds(db: any): Set { - // biome-ignore lint/suspicious/noExplicitAny: untyped SQLite row - const allFileNodes = db.prepare("SELECT id, file FROM nodes WHERE kind = 'file'").all() as any[]; +function buildTestFileIds(db: BetterSqlite3.Database): Set { + const allFileNodes = db.prepare("SELECT id, file FROM nodes WHERE kind = 'file'").all() as Array<{ + id: number; + file: string; + }>; const testFileIds = new Set(); const testFiles = new Set(); for (const n of allFileNodes) { @@ -55,30 +57,33 @@ function buildTestFileIds(db: any): Set { testFiles.add(n.file); } } - // biome-ignore lint/suspicious/noExplicitAny: untyped SQLite row - const allNodes = db.prepare('SELECT id, file FROM nodes').all() as any[]; + const allNodes = db.prepare('SELECT id, file FROM nodes').all() as Array<{ 
+ id: number; + file: string; + }>; for (const n of allNodes) { if (testFiles.has(n.file)) testFileIds.add(n.id); } return testFileIds; } -function countNodesByKind( - // biome-ignore lint/suspicious/noExplicitAny: db handle from better-sqlite3 - db: any, - testFileIds: Set | null, -): { total: number; byKind: Record } { - // biome-ignore lint/suspicious/noExplicitAny: untyped SQLite row - let nodeRows: any[]; +function countNodesByKind(db: BetterSqlite3.Database, testFileIds: Set | null) { + let nodeRows: Array<{ kind: string; c: number }>; if (testFileIds) { - // biome-ignore lint/suspicious/noExplicitAny: untyped SQLite row - const allNodes = db.prepare('SELECT id, kind, file FROM nodes').all() as any[]; + const allNodes = db.prepare('SELECT id, kind, file FROM nodes').all() as Array<{ + id: number; + kind: string; + file: string; + }>; const filtered = allNodes.filter((n) => !testFileIds.has(n.id)); const counts: Record = {}; for (const n of filtered) counts[n.kind] = (counts[n.kind] || 0) + 1; nodeRows = Object.entries(counts).map(([kind, c]) => ({ kind, c })); } else { - nodeRows = db.prepare('SELECT kind, COUNT(*) as c FROM nodes GROUP BY kind').all(); + nodeRows = db.prepare('SELECT kind, COUNT(*) as c FROM nodes GROUP BY kind').all() as Array<{ + kind: string; + c: number; + }>; } const byKind: Record = {}; let total = 0; @@ -89,16 +94,14 @@ function countNodesByKind( return { total, byKind }; } -function countEdgesByKind( - // biome-ignore lint/suspicious/noExplicitAny: db handle from better-sqlite3 - db: any, - testFileIds: Set | null, -): { total: number; byKind: Record } { - // biome-ignore lint/suspicious/noExplicitAny: untyped SQLite row - let edgeRows: any[]; +function countEdgesByKind(db: BetterSqlite3.Database, testFileIds: Set | null) { + let edgeRows: Array<{ kind: string; c: number }>; if (testFileIds) { - // biome-ignore lint/suspicious/noExplicitAny: untyped SQLite row - const allEdges = db.prepare('SELECT source_id, target_id, kind FROM 
edges').all() as any[]; + const allEdges = db.prepare('SELECT source_id, target_id, kind FROM edges').all() as Array<{ + source_id: number; + target_id: number; + kind: string; + }>; const filtered = allEdges.filter( (e) => !testFileIds.has(e.source_id) && !testFileIds.has(e.target_id), ); @@ -106,7 +109,10 @@ function countEdgesByKind( for (const e of filtered) counts[e.kind] = (counts[e.kind] || 0) + 1; edgeRows = Object.entries(counts).map(([kind, c]) => ({ kind, c })); } else { - edgeRows = db.prepare('SELECT kind, COUNT(*) as c FROM edges GROUP BY kind').all(); + edgeRows = db.prepare('SELECT kind, COUNT(*) as c FROM edges GROUP BY kind').all() as Array<{ + kind: string; + c: number; + }>; } const byKind: Record = {}; let total = 0; @@ -117,19 +123,16 @@ function countEdgesByKind( return { total, byKind }; } -function countFilesByLanguage( - // biome-ignore lint/suspicious/noExplicitAny: db handle from better-sqlite3 - db: any, - noTests: boolean, -): { total: number; languages: number; byLanguage: Record } { +function countFilesByLanguage(db: BetterSqlite3.Database, noTests: boolean) { const extToLang = new Map(); for (const entry of LANGUAGE_REGISTRY) { for (const ext of entry.extensions) { extToLang.set(ext, entry.id); } } - // biome-ignore lint/suspicious/noExplicitAny: untyped SQLite row - let fileNodes = db.prepare("SELECT file FROM nodes WHERE kind = 'file'").all() as any[]; + let fileNodes = db.prepare("SELECT file FROM nodes WHERE kind = 'file'").all() as Array<{ + file: string; + }>; if (noTests) fileNodes = fileNodes.filter((n) => !isTestFile(n.file)); const byLanguage: Record = {}; for (const row of fileNodes) { @@ -140,12 +143,7 @@ function countFilesByLanguage( return { total: fileNodes.length, languages: Object.keys(byLanguage).length, byLanguage }; } -// biome-ignore lint/suspicious/noExplicitAny: db handle from better-sqlite3 -function findHotspots( - db: any, - noTests: boolean, - limit: number, -): { file: string; fanIn: number; fanOut: 
number }[] { +function findHotspots(db: BetterSqlite3.Database, noTests: boolean, limit: number) { const testFilter = testFilterSQL('n.file', noTests); const hotspotRows = db .prepare(` @@ -157,8 +155,7 @@ function findHotspots( ORDER BY (SELECT COUNT(*) FROM edges WHERE target_id = n.id) + (SELECT COUNT(*) FROM edges WHERE source_id = n.id) DESC `) - // biome-ignore lint/suspicious/noExplicitAny: untyped SQLite row - .all() as any[]; + .all() as Array<{ file: string; fan_in: number; fan_out: number }>; const filtered = noTests ? hotspotRows.filter((r) => !isTestFile(r.file)) : hotspotRows; return filtered.slice(0, limit).map((r) => ({ file: r.file, @@ -167,15 +164,17 @@ function findHotspots( })); } -// biome-ignore lint/suspicious/noExplicitAny: db handle from better-sqlite3 -function getEmbeddingsInfo(db: any): object | null { +function getEmbeddingsInfo(db: BetterSqlite3.Database) { try { - // biome-ignore lint/suspicious/noExplicitAny: untyped SQLite row - const count = db.prepare('SELECT COUNT(*) as c FROM embeddings').get() as any; + const count = db.prepare('SELECT COUNT(*) as c FROM embeddings').get() as + | { c: number } + | undefined; if (count && count.c > 0) { const meta: Record = {}; - // biome-ignore lint/suspicious/noExplicitAny: untyped SQLite row - const metaRows = db.prepare('SELECT key, value FROM embedding_meta').all() as any[]; + const metaRows = db.prepare('SELECT key, value FROM embedding_meta').all() as Array<{ + key: string; + value: string; + }>; for (const r of metaRows) meta[r.key] = r.value; return { count: count.c, @@ -184,38 +183,45 @@ function getEmbeddingsInfo(db: any): object | null { builtAt: meta['built_at'] || null, }; } - } catch (e) { + } catch (e: unknown) { debug(`embeddings lookup skipped: ${(e as Error).message}`); } return null; } function computeQualityMetrics( - // biome-ignore lint/suspicious/noExplicitAny: db handle from better-sqlite3 - db: any, + db: BetterSqlite3.Database, testFilter: string, fpThreshold = 
FALSE_POSITIVE_CALLER_THRESHOLD, -): object { +) { const qualityTestFilter = testFilter.replace(/n\.file/g, 'file'); - const totalCallable = db - .prepare( - `SELECT COUNT(*) as c FROM nodes WHERE kind IN ('function', 'method') ${qualityTestFilter}`, - ) - .get().c; - const callableWithCallers = db - .prepare(` + const totalCallable = ( + db + .prepare( + `SELECT COUNT(*) as c FROM nodes WHERE kind IN ('function', 'method') ${qualityTestFilter}`, + ) + .get() as { c: number } + ).c; + const callableWithCallers = ( + db + .prepare(` SELECT COUNT(DISTINCT e.target_id) as c FROM edges e JOIN nodes n ON e.target_id = n.id WHERE e.kind = 'calls' AND n.kind IN ('function', 'method') ${testFilter} `) - .get().c; + .get() as { c: number } + ).c; const callerCoverage = totalCallable > 0 ? callableWithCallers / totalCallable : 0; - const totalCallEdges = db.prepare("SELECT COUNT(*) as c FROM edges WHERE kind = 'calls'").get().c; - const highConfCallEdges = db - .prepare("SELECT COUNT(*) as c FROM edges WHERE kind = 'calls' AND confidence >= 0.7") - .get().c; + const totalCallEdges = ( + db.prepare("SELECT COUNT(*) as c FROM edges WHERE kind = 'calls'").get() as { c: number } + ).c; + const highConfCallEdges = ( + db + .prepare("SELECT COUNT(*) as c FROM edges WHERE kind = 'calls' AND confidence >= 0.7") + .get() as { c: number } + ).c; const callConfidence = totalCallEdges > 0 ? highConfCallEdges / totalCallEdges : 0; const fpRows = db @@ -228,11 +234,10 @@ function computeQualityMetrics( HAVING caller_count > ? ORDER BY caller_count DESC `) - // biome-ignore lint/suspicious/noExplicitAny: untyped SQLite row - .all(fpThreshold) as any[]; + .all(fpThreshold) as Array<{ name: string; file: string; line: number; caller_count: number }>; const falsePositiveWarnings = fpRows .filter((r) => - FALSE_POSITIVE_NAMES.has(r.name.includes('.') ? r.name.split('.').pop() : r.name), + FALSE_POSITIVE_NAMES.has(r.name.includes('.') ? r.name.split('.').pop()! 
: r.name), ) .map((r) => ({ name: r.name, file: r.file, line: r.line, callerCount: r.caller_count })); @@ -260,15 +265,12 @@ function computeQualityMetrics( }; } -// biome-ignore lint/suspicious/noExplicitAny: db handle from better-sqlite3 -function countRoles(db: any, noTests: boolean): Record { - // biome-ignore lint/suspicious/noExplicitAny: untyped SQLite row - let roleRows: any[]; +function countRoles(db: BetterSqlite3.Database, noTests: boolean) { + let roleRows: Array<{ role: string; c: number }>; if (noTests) { const allRoleNodes = db .prepare('SELECT role, file FROM nodes WHERE role IS NOT NULL') - // biome-ignore lint/suspicious/noExplicitAny: untyped SQLite row - .all() as any[]; + .all() as Array<{ role: string; file: string }>; const filtered = allRoleNodes.filter((n) => !isTestFile(n.file)); const counts: Record = {}; for (const n of filtered) counts[n.role] = (counts[n.role] || 0) + 1; @@ -276,7 +278,7 @@ function countRoles(db: any, noTests: boolean): Record { } else { roleRows = db .prepare('SELECT role, COUNT(*) as c FROM nodes WHERE role IS NOT NULL GROUP BY role') - .all(); + .all() as Array<{ role: string; c: number }>; } const roles: Record = {}; let deadTotal = 0; @@ -288,8 +290,7 @@ function countRoles(db: any, noTests: boolean): Record { return roles; } -// biome-ignore lint/suspicious/noExplicitAny: db handle from better-sqlite3 -function getComplexitySummary(db: any, testFilter: string): object | null { +function getComplexitySummary(db: BetterSqlite3.Database, testFilter: string) { try { const cRows = db .prepare( @@ -297,25 +298,25 @@ function getComplexitySummary(db: any, testFilter: string): object | null { FROM function_complexity fc JOIN nodes n ON fc.node_id = n.id WHERE n.kind IN ('function','method') ${testFilter}`, ) - // biome-ignore lint/suspicious/noExplicitAny: untyped SQLite row - .all() as any[]; + .all() as Array<{ + cognitive: number; + cyclomatic: number; + max_nesting: number; + maintainability_index: number; + }>; if 
(cRows.length > 0) { const miValues = cRows.map((r) => r.maintainability_index || 0); return { analyzed: cRows.length, - avgCognitive: +(cRows.reduce((s: number, r) => s + r.cognitive, 0) / cRows.length).toFixed( - 1, - ), - avgCyclomatic: +( - cRows.reduce((s: number, r) => s + r.cyclomatic, 0) / cRows.length - ).toFixed(1), + avgCognitive: +(cRows.reduce((s, r) => s + r.cognitive, 0) / cRows.length).toFixed(1), + avgCyclomatic: +(cRows.reduce((s, r) => s + r.cyclomatic, 0) / cRows.length).toFixed(1), maxCognitive: Math.max(...cRows.map((r) => r.cognitive)), maxCyclomatic: Math.max(...cRows.map((r) => r.cyclomatic)), - avgMI: +(miValues.reduce((s: number, v: number) => s + v, 0) / miValues.length).toFixed(1), + avgMI: +(miValues.reduce((s, v) => s + v, 0) / miValues.length).toFixed(1), minMI: +Math.min(...miValues).toFixed(1), }; } - } catch (e) { + } catch (e: unknown) { debug(`complexity summary skipped: ${(e as Error).message}`); } return null; @@ -325,11 +326,7 @@ function getComplexitySummary(db: any, testFilter: string): object | null { // Public API // --------------------------------------------------------------------------- -export function moduleMapData( - customDbPath: string | undefined, - limit = 20, - opts: { noTests?: boolean } = {}, -): object { +export function moduleMapData(customDbPath: string, limit = 20, opts: { noTests?: boolean } = {}) { const db = openReadonlyOrFail(customDbPath); try { const noTests = opts.noTests || false; @@ -347,8 +344,7 @@ export function moduleMapData( ORDER BY (SELECT COUNT(*) FROM edges WHERE target_id = n.id AND kind NOT IN ('contains', 'parameter_of', 'receiver')) DESC LIMIT ? 
`) - // biome-ignore lint/suspicious/noExplicitAny: untyped SQLite row - .all(limit) as any[]; + .all(limit) as Array<{ file: string; in_edges: number; out_edges: number }>; const topNodes = nodes.map((n) => ({ file: n.file, @@ -358,9 +354,11 @@ export function moduleMapData( coupling: n.in_edges + n.out_edges, })); - const totalNodes = db.prepare('SELECT COUNT(*) as c FROM nodes').get().c; - const totalEdges = db.prepare('SELECT COUNT(*) as c FROM edges').get().c; - const totalFiles = db.prepare("SELECT COUNT(*) as c FROM nodes WHERE kind = 'file'").get().c; + const totalNodes = (db.prepare('SELECT COUNT(*) as c FROM nodes').get() as { c: number }).c; + const totalEdges = (db.prepare('SELECT COUNT(*) as c FROM edges').get() as { c: number }).c; + const totalFiles = ( + db.prepare("SELECT COUNT(*) as c FROM nodes WHERE kind = 'file'").get() as { c: number } + ).c; return { limit, topNodes, stats: { totalFiles, totalNodes, totalEdges } }; } finally { @@ -369,10 +367,10 @@ export function moduleMapData( } export function statsData( - customDbPath: string | undefined, - // biome-ignore lint/suspicious/noExplicitAny: config shape varies by caller + customDbPath: string, + // biome-ignore lint/suspicious/noExplicitAny: config shape is dynamic opts: { noTests?: boolean; config?: any } = {}, -): object { +) { const db = openReadonlyOrFail(customDbPath); try { const noTests = opts.noTests || false; diff --git a/src/domain/analysis/roles.ts b/src/domain/analysis/roles.ts index a496818e..159ed1fc 100644 --- a/src/domain/analysis/roles.ts +++ b/src/domain/analysis/roles.ts @@ -4,9 +4,10 @@ import { isTestFile } from '../../infrastructure/test-filter.js'; import { DEAD_ROLE_PREFIX } from '../../shared/kinds.js'; import { normalizeSymbol } from '../../shared/normalize.js'; import { paginateResult } from '../../shared/paginate.js'; +import type { NodeRow } from '../../types.js'; export function rolesData( - customDbPath: string | undefined, + customDbPath: string, opts: { 
noTests?: boolean; role?: string | null; @@ -14,13 +15,13 @@ export function rolesData( limit?: number; offset?: number; } = {}, -): object { +) { const db = openReadonlyOrFail(customDbPath); try { const noTests = opts.noTests || false; const filterRole = opts.role || null; - const conditions: string[] = ['role IS NOT NULL']; - const params: unknown[] = []; + const conditions = ['role IS NOT NULL']; + const params: (string | number)[] = []; if (filterRole) { if (filterRole === DEAD_ROLE_PREFIX) { @@ -32,7 +33,7 @@ export function rolesData( } } { - const fc = buildFileConditionSQL(opts.file ?? '', 'file'); + const fc = buildFileConditionSQL(opts.file || '', 'file'); if (fc.sql) { // Strip leading ' AND ' since we're using conditions array conditions.push(fc.sql.replace(/^ AND /, '')); @@ -44,14 +45,13 @@ export function rolesData( .prepare( `SELECT name, kind, file, line, end_line, role FROM nodes WHERE ${conditions.join(' AND ')} ORDER BY role, file, line`, ) - // biome-ignore lint/suspicious/noExplicitAny: DB row types not yet migrated - .all(...params) as any[]; + .all(...params) as NodeRow[]; if (noTests) rows = rows.filter((r) => !isTestFile(r.file)); const summary: Record = {}; for (const r of rows) { - summary[r.role] = (summary[r.role] || 0) + 1; + summary[r.role as string] = (summary[r.role as string] || 0) + 1; } const hc = new Map(); diff --git a/src/domain/analysis/symbol-lookup.ts b/src/domain/analysis/symbol-lookup.ts index a5252ab6..f7cf98cf 100644 --- a/src/domain/analysis/symbol-lookup.ts +++ b/src/domain/analysis/symbol-lookup.ts @@ -19,38 +19,52 @@ import { isTestFile } from '../../infrastructure/test-filter.js'; import { EVERY_SYMBOL_KIND } from '../../shared/kinds.js'; import { getFileHash, normalizeSymbol } from '../../shared/normalize.js'; import { paginateResult } from '../../shared/paginate.js'; -import type { SymbolKind } from '../../types.js'; +import type { + AdjacentEdgeRow, + BetterSqlite3Database, + ChildNodeRow, + ImportEdgeRow, + 
NodeRow, + NodeRowWithFanIn, + SymbolKind, +} from '../../types.js'; const FUNCTION_KINDS: SymbolKind[] = ['function', 'method', 'class', 'constant']; /** * Find nodes matching a name query, ranked by relevance. * Scoring: exact=100, prefix=60, word-boundary=40, substring=10, plus fan-in tiebreaker. - * - * @param {object} dbOrRepo - A better-sqlite3 Database or a Repository instance */ export function findMatchingNodes( - dbOrRepo: any, + dbOrRepo: BetterSqlite3Database | InstanceType, name: string, - opts: { kind?: string; kinds?: string[]; noTests?: boolean; file?: string } = {}, -): any[] { - const kinds: SymbolKind[] = opts.kind - ? [opts.kind as SymbolKind] - : opts.kinds?.length - ? (opts.kinds as SymbolKind[]) - : FUNCTION_KINDS; + opts: { noTests?: boolean; file?: string; kind?: string; kinds?: readonly string[] } = {}, +): Array { + const kinds = ( + opts.kind ? [opts.kind] : opts.kinds?.length ? [...opts.kinds] : FUNCTION_KINDS + ) as SymbolKind[]; const isRepo = dbOrRepo instanceof Repository; - const rows = isRepo - ? dbOrRepo.findNodesWithFanIn(`%${name}%`, { kinds, file: opts.file }) - : findNodesWithFanIn(dbOrRepo, `%${name}%`, { kinds, file: opts.file }); - - const nodes: any[] = opts.noTests ? rows.filter((n: any) => !isTestFile(n.file)) : rows; + const rows = ( + isRepo + ? (dbOrRepo as InstanceType).findNodesWithFanIn(`%${name}%`, { + kinds, + file: opts.file, + }) + : findNodesWithFanIn(dbOrRepo as BetterSqlite3Database, `%${name}%`, { + kinds, + file: opts.file, + }) + ) as NodeRowWithFanIn[]; + + const nodes: Array = ( + opts.noTests ? rows.filter((n) => !isTestFile(n.file)) : rows + ) as Array; const lowerQuery = name.toLowerCase(); for (const node of nodes) { const lowerName = node.name.toLowerCase(); - const bareName = lowerName.includes('.') ? lowerName.split('.').pop() : lowerName; + const bareName = lowerName.includes('.') ? lowerName.split('.').pop()! 
: lowerName; let matchScore: number; if (lowerName === lowerQuery || bareName === lowerQuery) { @@ -67,45 +81,45 @@ export function findMatchingNodes( node._relevance = matchScore + fanInBonus; } - nodes.sort((a: any, b: any) => b._relevance - a._relevance); + nodes.sort((a, b) => b._relevance - a._relevance); return nodes; } export function queryNameData( name: string, - customDbPath: string | undefined, + customDbPath: string, opts: { noTests?: boolean; limit?: number; offset?: number } = {}, -): object { +) { const db = openReadonlyOrFail(customDbPath); try { const noTests = opts.noTests || false; - let nodes = db.prepare(`SELECT * FROM nodes WHERE name LIKE ?`).all(`%${name}%`) as any[]; - if (noTests) nodes = nodes.filter((n: any) => !isTestFile(n.file)); + let nodes = db.prepare(`SELECT * FROM nodes WHERE name LIKE ?`).all(`%${name}%`) as NodeRow[]; + if (noTests) nodes = nodes.filter((n) => !isTestFile(n.file)); if (nodes.length === 0) { return { query: name, results: [] }; } const hc = new Map(); - const results = nodes.map((node: any) => { - let callees = findAllOutgoingEdges(db, node.id); + const results = nodes.map((node) => { + let callees = findAllOutgoingEdges(db, node.id) as AdjacentEdgeRow[]; - let callers = findAllIncomingEdges(db, node.id); + let callers = findAllIncomingEdges(db, node.id) as AdjacentEdgeRow[]; if (noTests) { - callees = callees.filter((c: any) => !isTestFile(c.file)); - callers = callers.filter((c: any) => !isTestFile(c.file)); + callees = callees.filter((c) => !isTestFile(c.file)); + callers = callers.filter((c) => !isTestFile(c.file)); } return { ...normalizeSymbol(node, db, hc), - callees: callees.map((c: any) => ({ + callees: callees.map((c) => ({ name: c.name, kind: c.kind, file: c.file, line: c.line, edgeKind: c.edge_kind, })), - callers: callers.map((c: any) => ({ + callers: callers.map((c) => ({ name: c.name, kind: c.kind, file: c.file, @@ -122,50 +136,50 @@ export function queryNameData( } } -function whereSymbolImpl(db: 
any, target: string, noTests: boolean): any[] { +function whereSymbolImpl(db: BetterSqlite3Database, target: string, noTests: boolean) { const placeholders = EVERY_SYMBOL_KIND.map(() => '?').join(', '); let nodes = db .prepare( `SELECT * FROM nodes WHERE name LIKE ? AND kind IN (${placeholders}) ORDER BY file, line`, ) - .all(`%${target}%`, ...EVERY_SYMBOL_KIND) as any[]; - if (noTests) nodes = nodes.filter((n: any) => !isTestFile(n.file)); + .all(`%${target}%`, ...EVERY_SYMBOL_KIND) as NodeRow[]; + if (noTests) nodes = nodes.filter((n) => !isTestFile(n.file)); const hc = new Map(); - return nodes.map((node: any) => { + return nodes.map((node) => { const crossCount = countCrossFileCallers(db, node.id, node.file); const exported = crossCount > 0; - let uses = findCallers(db, node.id); - if (noTests) uses = uses.filter((u: any) => !isTestFile(u.file)); + let uses = findCallers(db, node.id) as Array<{ name: string; file: string; line: number }>; + if (noTests) uses = uses.filter((u) => !isTestFile(u.file)); return { ...normalizeSymbol(node, db, hc), exported, - uses: uses.map((u: any) => ({ name: u.name, file: u.file, line: u.line })), + uses: uses.map((u) => ({ name: u.name, file: u.file, line: u.line })), }; }); } -function whereFileImpl(db: any, target: string): any[] { - const fileNodes = findFileNodes(db, `%${target}%`); +function whereFileImpl(db: BetterSqlite3Database, target: string) { + const fileNodes = findFileNodes(db, `%${target}%`) as NodeRow[]; if (fileNodes.length === 0) return []; - return fileNodes.map((fn: any) => { - const symbols = findNodesByFile(db, fn.file); + return fileNodes.map((fn) => { + const symbols = findNodesByFile(db, fn.file) as NodeRow[]; - const imports = findImportTargets(db, fn.id).map((r: any) => r.file); + const imports = (findImportTargets(db, fn.id) as ImportEdgeRow[]).map((r) => r.file); - const importedBy = findImportSources(db, fn.id).map((r: any) => r.file); + const importedBy = (findImportSources(db, fn.id) as 
ImportEdgeRow[]).map((r) => r.file); - const exportedIds = findCrossFileCallTargets(db, fn.file); + const exportedIds = findCrossFileCallTargets(db, fn.file) as Set; - const exported = symbols.filter((s: any) => exportedIds.has(s.id)).map((s: any) => s.name); + const exported = symbols.filter((s) => exportedIds.has(s.id)).map((s) => s.name); return { file: fn.file, fileHash: getFileHash(db, fn.file), - symbols: symbols.map((s: any) => ({ name: s.name, kind: s.kind, line: s.line })), + symbols: symbols.map((s) => ({ name: s.name, kind: s.kind, line: s.line })), imports, importedBy, exported, @@ -175,9 +189,9 @@ function whereFileImpl(db: any, target: string): any[] { export function whereData( target: string, - customDbPath: string | undefined, + customDbPath: string, opts: { noTests?: boolean; file?: boolean; limit?: number; offset?: number } = {}, -): object { +) { const db = openReadonlyOrFail(customDbPath); try { const noTests = opts.noTests || false; @@ -193,7 +207,7 @@ export function whereData( } export function listFunctionsData( - customDbPath: string | undefined, + customDbPath: string, opts: { noTests?: boolean; file?: string; @@ -201,17 +215,17 @@ export function listFunctionsData( limit?: number; offset?: number; } = {}, -): object { +) { const db = openReadonlyOrFail(customDbPath); try { const noTests = opts.noTests || false; - let rows = listFunctionNodes(db, { file: opts.file, pattern: opts.pattern }); + let rows = listFunctionNodes(db, { file: opts.file, pattern: opts.pattern }) as NodeRow[]; - if (noTests) rows = rows.filter((r: any) => !isTestFile(r.file)); + if (noTests) rows = rows.filter((r) => !isTestFile(r.file)); const hc = new Map(); - const functions = rows.map((r: any) => normalizeSymbol(r, db, hc)); + const functions = rows.map((r) => normalizeSymbol(r, db, hc)); const base = { count: functions.length, functions }; return paginateResult(base, 'functions', { limit: opts.limit, offset: opts.offset }); } finally { @@ -221,9 +235,9 @@ export 
function listFunctionsData( export function childrenData( name: string, - customDbPath: string | undefined, + customDbPath: string, opts: { noTests?: boolean; file?: string; kind?: string; limit?: number; offset?: number } = {}, -): object { +) { const db = openReadonlyOrFail(customDbPath); try { const noTests = opts.noTests || false; @@ -233,15 +247,18 @@ export function childrenData( return { name, results: [] }; } - const results = nodes.map((node: any) => { - let children: any[]; + const results = nodes.map((node) => { + let children: ChildNodeRow[]; try { - children = findNodeChildren(db, node.id); - } catch (e: any) { - debug(`findNodeChildren failed for node ${node.id}: ${e.message}`); + children = findNodeChildren(db, node.id) as ChildNodeRow[]; + } catch (e: unknown) { + debug(`findNodeChildren failed for node ${node.id}: ${(e as Error).message}`); children = []; } - if (noTests) children = children.filter((c: any) => !isTestFile(c.file || node.file)); + if (noTests) + children = children.filter( + (c) => !isTestFile((c as ChildNodeRow & { file?: string }).file || node.file), + ); return { name: node.name, kind: node.kind, @@ -250,7 +267,7 @@ export function childrenData( scope: node.scope || null, visibility: node.visibility || null, qualifiedName: node.qualified_name || null, - children: children.map((c: any) => ({ + children: children.map((c) => ({ name: c.name, kind: c.kind, line: c.line, diff --git a/src/domain/parser.ts b/src/domain/parser.ts index 04ee55eb..88abb89b 100644 --- a/src/domain/parser.ts +++ b/src/domain/parser.ts @@ -1,10 +1,10 @@ import fs from 'node:fs'; import path from 'node:path'; import { fileURLToPath } from 'node:url'; +import type { Tree } from 'web-tree-sitter'; import { Language, Parser, Query } from 'web-tree-sitter'; import { debug, warn } from '../infrastructure/logger.js'; import { getNative, getNativePackageVersion, loadNative } from '../infrastructure/native.js'; -import type { EngineMode, ExtractorOutput, 
LanguageRegistryEntry } from '../types.js'; // Re-export all extractors for backward compatibility export { @@ -37,16 +37,51 @@ function grammarPath(name: string): string { return path.join(__dirname, '..', '..', 'grammars', name); } -let _initialized = false; +let _initialized: boolean = false; // Memoized parsers — avoids reloading WASM grammars on every createParsers() call -let _cachedParsers: Map | null = null; +let _cachedParsers: Map | null = null; // Cached Language objects — WASM-backed, must be .delete()'d explicitly -let _cachedLanguages: Map | null = null; +let _cachedLanguages: Map | null = null; // Query cache for JS/TS/TSX extractors (populated during createParsers) -const _queryCache: Map = new Map(); +const _queryCache: Map = new Map(); + +// Extensions that need typeMap backfill (type annotations only exist in TS/TSX) +const TS_BACKFILL_EXTS = new Set(['.ts', '.tsx']); + +/** + * Declarative registry entry for a supported language. + */ +export interface LanguageRegistryEntry { + id: string; + extensions: string[]; + grammarFile: string; + // biome-ignore lint/suspicious/noExplicitAny: extractor signatures vary per language + extractor: (...args: any[]) => any; + required: boolean; +} + +interface EngineOpts { + engine?: string; + dataflow?: boolean; + ast?: boolean; +} + +interface ResolvedEngine { + name: 'native' | 'wasm'; + // biome-ignore lint/suspicious/noExplicitAny: native addon has no type declarations + native: any; +} + +// biome-ignore lint/suspicious/noExplicitAny: extractor return types vary per language +interface WasmExtractResult { + // biome-ignore lint/suspicious/noExplicitAny: extractor return shapes vary per language + symbols: any; + tree: Tree; + langId: string; +} // Shared patterns for all JS/TS/TSX (class_declaration excluded — name type differs) const COMMON_QUERY_PATTERNS: string[] = [ @@ -63,7 +98,7 @@ const COMMON_QUERY_PATTERNS: string[] = [ ]; // JS: class name is (identifier) -const JS_CLASS_PATTERN = 
'(class_declaration name: (identifier) @cls_name) @cls_node'; +const JS_CLASS_PATTERN: string = '(class_declaration name: (identifier) @cls_name) @cls_node'; // TS/TSX: class name is (type_identifier), plus interface and type alias const TS_EXTRA_PATTERNS: string[] = [ @@ -72,7 +107,7 @@ const TS_EXTRA_PATTERNS: string[] = [ '(type_alias_declaration name: (type_identifier) @type_name) @type_node', ]; -export async function createParsers(): Promise> { +export async function createParsers(): Promise> { if (_cachedParsers) return _cachedParsers; if (!_initialized) { @@ -80,8 +115,8 @@ export async function createParsers(): Promise> { _initialized = true; } - const parsers: Map = new Map(); - const languages: Map = new Map(); + const parsers = new Map(); + const languages = new Map(); for (const entry of LANGUAGE_REGISTRY) { try { const lang = await Language.load(grammarPath(entry.grammarFile)); @@ -97,10 +132,10 @@ export async function createParsers(): Promise> { : [...COMMON_QUERY_PATTERNS, JS_CLASS_PATTERN]; _queryCache.set(entry.id, new Query(lang, patterns.join('\n'))); } - } catch (e: any) { + } catch (e: unknown) { if (entry.required) throw e; warn( - `${entry.id} parser failed to initialize: ${e.message}. ${entry.id} files will be skipped.`, + `${entry.id} parser failed to initialize: ${(e as Error).message}. 
${entry.id} files will be skipped.`, ); parsers.set(entry.id, null); } @@ -121,8 +156,8 @@ export function disposeParsers(): void { if (parser && typeof parser.delete === 'function') { try { parser.delete(); - } catch (e: any) { - debug(`Failed to dispose parser ${id}: ${e.message}`); + } catch (e: unknown) { + debug(`Failed to dispose parser ${id}: ${(e as Error).message}`); } } } @@ -132,19 +167,20 @@ export function disposeParsers(): void { if (query && typeof query.delete === 'function') { try { query.delete(); - } catch (e: any) { - debug(`Failed to dispose query ${id}: ${e.message}`); + } catch (e: unknown) { + debug(`Failed to dispose query ${id}: ${(e as Error).message}`); } } } _queryCache.clear(); if (_cachedLanguages) { for (const [id, lang] of _cachedLanguages) { - if (lang && typeof lang.delete === 'function') { + // biome-ignore lint/suspicious/noExplicitAny: .delete() exists at runtime on WASM Language objects but is missing from typings + if (lang && typeof (lang as any).delete === 'function') { try { - lang.delete(); - } catch (e: any) { - debug(`Failed to dispose language ${id}: ${e.message}`); + (lang as any).delete(); + } catch (e: unknown) { + debug(`Failed to dispose language ${id}: ${(e as Error).message}`); } } } @@ -153,7 +189,7 @@ export function disposeParsers(): void { _initialized = false; } -export function getParser(parsers: Map, filePath: string): any | null { +export function getParser(parsers: Map, filePath: string): Parser | null { const ext = path.extname(filePath); const entry = _extToLang.get(ext); if (!entry) return null; @@ -165,8 +201,9 @@ export function getParser(parsers: Map, filePath: string): any | nu * don't each need to create parsers and re-parse independently. * Only parses files whose extension is in SUPPORTED_EXTENSIONS. 
*/ +// biome-ignore lint/suspicious/noExplicitAny: fileSymbols values have dynamic shape from extractors export async function ensureWasmTrees( - fileSymbols: Map, + fileSymbols: Map, rootDir: string, ): Promise { // Check if any file needs a tree @@ -196,15 +233,15 @@ export async function ensureWasmTrees( let code: string; try { code = fs.readFileSync(absPath, 'utf-8'); - } catch (e: any) { - debug(`ensureWasmTrees: cannot read ${relPath}: ${e.message}`); + } catch (e: unknown) { + debug(`ensureWasmTrees: cannot read ${relPath}: ${(e as Error).message}`); continue; } try { symbols._tree = parser.parse(code); symbols._langId = entry.id; - } catch (e: any) { - debug(`ensureWasmTrees: parse failed for ${relPath}: ${e.message}`); + } catch (e: unknown) { + debug(`ensureWasmTrees: parse failed for ${relPath}: ${(e as Error).message}`); } } } @@ -220,12 +257,7 @@ export function isWasmAvailable(): boolean { // ── Unified API ────────────────────────────────────────────────────────────── -interface ResolvedEngine { - name: string; - native: any; -} - -function resolveEngine(opts: { engine?: EngineMode; nativeEngine?: any } = {}): ResolvedEngine { +function resolveEngine(opts: EngineOpts = {}): ResolvedEngine { const pref = opts.engine || 'auto'; if (pref === 'wasm') return { name: 'wasm', native: null }; if (pref === 'native' || pref === 'auto') { @@ -244,8 +276,9 @@ function resolveEngine(opts: { engine?: EngineMode; nativeEngine?: any } = {}): * This only handles: * - _lineCount compat for builder.js * - Backward compat for older native binaries missing js_name annotations - * - dataflow argFlows/mutations bindingType → binding wrapper + * - dataflow argFlows/mutations bindingType -> binding wrapper */ +// biome-ignore lint/suspicious/noExplicitAny: native result has dynamic shape function patchNativeResult(r: any): any { // lineCount: napi(js_name) emits "lineCount"; older binaries may emit "line_count" r.lineCount = r.lineCount ?? r.line_count ?? 
null; @@ -390,9 +423,10 @@ export const SUPPORTED_EXTENSIONS: Set = new Set(_extToLang.keys()); * matches inside comments and string literals. * TODO: Remove once all published native binaries include typeMap extraction (>= 3.2.0) */ +// biome-ignore lint/suspicious/noExplicitAny: return shape matches native result typeMap async function backfillTypeMap( filePath: string, - source: string, + source?: string, ): Promise<{ typeMap: any; backfilled: boolean }> { let code = source; if (!code) { @@ -408,9 +442,12 @@ async function backfillTypeMap( if (!extracted?.symbols?.typeMap) { return { typeMap: [], backfilled: false }; } - const tm: any = extracted.symbols.typeMap; + const tm = extracted.symbols.typeMap; return { - typeMap: tm instanceof Map ? tm : new Map(tm.map((e: any) => [e.name, e.typeName])), + typeMap: + tm instanceof Map + ? tm + : new Map(tm.map((e: { name: string; typeName: string }) => [e.name, e.typeName])), backfilled: true, }; } finally { @@ -427,20 +464,21 @@ async function backfillTypeMap( * WASM extraction helper: picks the right extractor based on file extension. */ function wasmExtractSymbols( - parsers: Map, + parsers: Map, filePath: string, code: string, -): { symbols: ExtractorOutput; tree: any; langId: string } | null { +): WasmExtractResult | null { const parser = getParser(parsers, filePath); if (!parser) return null; - let tree: any; + let tree: Tree | null; try { tree = parser.parse(code); - } catch (e: any) { - warn(`Parse error in ${filePath}: ${e.message}`); + } catch (e: unknown) { + warn(`Parse error in ${filePath}: ${(e as Error).message}`); return null; } + if (!tree) return null; const ext = path.extname(filePath); const entry = _extToLang.get(ext); @@ -453,19 +491,12 @@ function wasmExtractSymbols( /** * Parse a single file and return normalized symbols. 
  */
+// biome-ignore lint/suspicious/noExplicitAny: return shape varies between native and WASM engines
 export async function parseFileAuto(
   filePath: string,
   source: string,
-  opts: {
-    engine?: EngineMode;
-    nativeEngine?: any;
-    parsers?: Map<string, Parser>;
-    rootDir?: string;
-    aliases?: any;
-    dataflow?: boolean;
-    ast?: boolean;
-  } = {},
-): Promise<ExtractorOutput | null> {
+  opts: EngineOpts = {},
+): Promise<any> {
   const { native } = resolveEngine(opts);
 
   if (native) {
@@ -474,7 +505,6 @@ export async function parseFileAuto(
     const patched = patchNativeResult(result);
     // Only backfill typeMap for TS/TSX — JS files have no type annotations,
     // and the native engine already handles `new Expr()` patterns.
-    const TS_BACKFILL_EXTS = new Set(['.ts', '.tsx']);
     if (
       (!patched.typeMap || patched.typeMap.length === 0) &&
       TS_BACKFILL_EXTS.has(path.extname(filePath))
@@ -495,21 +525,15 @@ export async function parseFileAuto(
 /**
  * Parse multiple files in bulk and return a Map.
  */
+// biome-ignore lint/suspicious/noExplicitAny: return shape varies between native and WASM engines
 export async function parseFilesAuto(
   filePaths: string[],
   rootDir: string,
-  opts: {
-    engine?: EngineMode;
-    nativeEngine?: any;
-    parsers?: Map<string, Parser>;
-    aliases?: any;
-    signal?: AbortSignal;
-    dataflow?: boolean;
-    ast?: boolean;
-  } = {},
-): Promise<Map<string, ExtractorOutput>> {
+  opts: EngineOpts = {},
+): Promise<Map<string, any>> {
   const { native } = resolveEngine(opts);
-  const result: Map<string, ExtractorOutput> = new Map();
+  // biome-ignore lint/suspicious/noExplicitAny: result values have dynamic shape from extractors
+  const result = new Map<string, any>();
 
   if (native) {
     const nativeResults = native.parseFiles(
@@ -532,24 +556,29 @@
     if (needsTypeMap.length > 0) {
       // Only backfill for languages where WASM extraction can produce typeMap
       // (TS/TSX have type annotations; JS only has `new Expr()` which native already handles)
-      const TS_EXTS = new Set(['.ts', '.tsx']);
-      const tsFiles = needsTypeMap.filter(({ filePath }) => TS_EXTS.has(path.extname(filePath)));
+      const tsFiles = needsTypeMap.filter(({ filePath }) =>
+        TS_BACKFILL_EXTS.has(path.extname(filePath)),
+      );
       if (tsFiles.length > 0) {
         const parsers = await createParsers();
         for (const { filePath, relPath } of tsFiles) {
-          let extracted: { symbols: ExtractorOutput; tree: any; langId: string } | null | undefined;
+          let extracted: WasmExtractResult | null | undefined;
           try {
             const code = fs.readFileSync(filePath, 'utf-8');
             extracted = wasmExtractSymbols(parsers, filePath, code);
             if (extracted?.symbols?.typeMap) {
-              const symbols = result.get(relPath)!;
-              (symbols as any).typeMap =
+              const symbols = result.get(relPath);
+              if (!symbols) continue;
+              symbols.typeMap =
                 extracted.symbols.typeMap instanceof Map
                   ? extracted.symbols.typeMap
                   : new Map(
-                      (extracted.symbols.typeMap as any).map((e: any) => [e.name, e.typeName]),
+                      extracted.symbols.typeMap.map((e: { name: string; typeName: string }) => [
+                        e.name,
+                        e.typeName,
+                      ]),
                     );
-              (symbols as any)._typeMapBackfilled = true;
+              symbols._typeMapBackfilled = true;
             }
           } catch {
             /* skip — typeMap is a best-effort backfill */
@@ -573,15 +602,15 @@ export async function parseFilesAuto(
       let code: string;
       try {
         code = fs.readFileSync(filePath, 'utf-8');
-      } catch (err: any) {
-        warn(`Skipping ${path.relative(rootDir, filePath)}: ${err.message}`);
+      } catch (err: unknown) {
+        warn(`Skipping ${path.relative(rootDir, filePath)}: ${(err as Error).message}`);
         continue;
       }
       const extracted = wasmExtractSymbols(parsers, filePath, code);
       if (extracted) {
         const relPath = path.relative(rootDir, filePath).split(path.sep).join('/');
         extracted.symbols._tree = extracted.tree;
-        extracted.symbols._langId = extracted.langId as any;
+        extracted.symbols._langId = extracted.langId;
         extracted.symbols._lineCount = code.split('\n').length;
         result.set(relPath, extracted.symbols);
       }
@@ -592,8 +621,8 @@
 /**
  * Report which engine is active.
  */
-export function getActiveEngine(opts: { engine?: EngineMode; nativeEngine?: any } = {}): {
-  name: string;
+export function getActiveEngine(opts: EngineOpts = {}): {
+  name: 'native' | 'wasm';
   version: string | null;
 } {
   const { name, native } = resolveEngine(opts);
@@ -607,8 +636,8 @@ export function getActiveEngine(opts: { engine?: EngineMode; nativeEngine?: any
   if (native) {
     try {
       version = getNativePackageVersion() ?? version;
-    } catch (e: any) {
-      debug(`getNativePackageVersion failed: ${e.message}`);
+    } catch (e: unknown) {
+      debug(`getNativePackageVersion failed: ${(e as Error).message}`);
     }
   }
   return { name, version };
@@ -618,6 +647,7 @@
  * Create a native ParseTreeCache for incremental parsing.
  * Returns null if the native engine is unavailable (WASM fallback).
  */
+// biome-ignore lint/suspicious/noExplicitAny: native ParseTreeCache has no type declarations
 export function createParseTreeCache(): any {
   const native = loadNative();
   if (!native || !native.ParseTreeCache) return null;
@@ -627,25 +657,19 @@
 /**
  * Parse a file incrementally using the cache, or fall back to full parse.
  */
+// biome-ignore lint/suspicious/noExplicitAny: cache is native ParseTreeCache with no type declarations; return shape varies
 export async function parseFileIncremental(
   cache: any,
   filePath: string,
   source: string,
-  opts: {
-    engine?: EngineMode;
-    nativeEngine?: any;
-    parsers?: Map<string, Parser>;
-    rootDir?: string;
-    aliases?: any;
-  } = {},
-): Promise<ExtractorOutput | null> {
+  opts: EngineOpts = {},
+): Promise<any> {
   if (cache) {
     const result = cache.parseFile(filePath, source);
     if (!result) return null;
     const patched = patchNativeResult(result);
     // Only backfill typeMap for TS/TSX — JS files have no type annotations,
     // and the native engine already handles `new Expr()` patterns.
-    const TS_BACKFILL_EXTS = new Set(['.ts', '.tsx']);
     if (
       (!patched.typeMap || patched.typeMap.length === 0) &&
       TS_BACKFILL_EXTS.has(path.extname(filePath))
diff --git a/src/types.ts b/src/types.ts
index 9aa8bf7c..4e08f77a 100644
--- a/src/types.ts
+++ b/src/types.ts
@@ -1650,16 +1650,17 @@ export interface SqliteStatement<TRow = unknown> {
   all(...params: unknown[]): TRow[];
   run(...params: unknown[]): { changes: number; lastInsertRowid: number | bigint };
   iterate(...params: unknown[]): IterableIterator<TRow>;
+  raw(toggle?: boolean): this;
 }
 
 /** Minimal database interface matching the better-sqlite3 surface we use. */
 export interface BetterSqlite3Database {
   prepare(sql: string): SqliteStatement;
-  exec(sql: string): void;
+  exec(sql: string): this;
   close(): void;
   pragma(sql: string): unknown;
   // biome-ignore lint/suspicious/noExplicitAny: must be compatible with better-sqlite3's generic Transaction return type
-  transaction<T>(fn: (...args: any[]) => T): (...args: any[]) => T;
+  transaction<F extends (...args: any[]) => any>(fn: F): F;
   readonly open: boolean;
   readonly name: string;
 }
diff --git a/src/vendor.d.ts b/src/vendor.d.ts
index 9edc233b..e8c49fdf 100644
--- a/src/vendor.d.ts
+++ b/src/vendor.d.ts
@@ -19,6 +19,7 @@ declare module 'better-sqlite3' {
     get(...params: unknown[]): unknown | undefined;
     all(...params: unknown[]): unknown[];
     iterate(...params: unknown[]): IterableIterator<unknown>;
+    raw(toggle?: boolean): this;
   }
 
   interface RunResult {
diff --git a/tests/helpers/node-version.js b/tests/helpers/node-version.js
new file mode 100644
index 00000000..f8603a82
--- /dev/null
+++ b/tests/helpers/node-version.js
@@ -0,0 +1,6 @@
+/**
+ * Node >= 22.6 supports --experimental-strip-types, required for tests that
+ * spawn child processes loading .ts source files directly.
+ */
+const [major, minor] = process.versions.node.split('.').map(Number);
+export const canStripTypes = major > 22 || (major === 22 && minor >= 6);