diff --git a/.fmm.db b/.fmm.db deleted file mode 100644 index 1ac62ed..0000000 Binary files a/.fmm.db and /dev/null differ diff --git a/.gitignore b/.gitignore index 9a389f0..0af02f8 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ +.fmm.db .nancy/ # Rust diff --git a/Cargo.toml b/Cargo.toml index deedb55..fbac78e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -50,6 +50,7 @@ ctrlc = "3.4" # Parallelism rayon = "1.10" +indicatif = { version = "0.17", features = ["rayon"] } # Database rusqlite = { version = "0.32", features = ["bundled"] } diff --git a/fixtures/typescript/mega_function.ts b/fixtures/typescript/mega_function.ts new file mode 100644 index 0000000..477ea65 --- /dev/null +++ b/fixtures/typescript/mega_function.ts @@ -0,0 +1,39 @@ +// Test fixture for ALP-922: nested symbol extraction from mega-functions + +export function createTypeChecker(host: any): any { + // Prologue vars — non-trivial (have call expressions or type annotations) + var silentNeverType = createIntrinsicType(TypeFlags.Never, "never"); + const checker: TypeChecker = {} as TypeChecker; + var compilerOptions = host.getCompilerOptions(); + + // Trivial prologue vars — should NOT be extracted + var inStrictMode = false; + let counter = 0; + + // Depth-1 nested function declarations — should be extracted + function getIndexType(type: any, index: any): any { + return undefined; + } + + function getReturnType(signature: any): any { + return undefined; + } + + // A nested function with its own nested function (depth > 1 — should NOT be extracted) + function outerHelper() { + function innerHelper() { + // depth 2 — must not appear in index + } + } + + return checker; +} + +// Non-exported function with nested declarations — should still be indexed +function internalHelper(): void { + var state = createState(); + + function processItem(item: any): void { + return; + } +} diff --git a/src/cli/init.rs b/src/cli/init.rs index c00df9b..9629848 100644 --- a/src/cli/init.rs +++ b/src/cli/init.rs @@ 
-56,8 +56,7 @@ pub fn init(skill: bool, mcp: bool, all: bool, no_generate: bool) -> Result<()> lang_set.into_iter().collect::<Vec<_>>().join(", ") ); - println!("{}", "Generating index...".green().bold()); - sidecar::generate(&[".".to_string()], false, false)?; + sidecar::generate(&[".".to_string()], false, false, false)?; // Show DB stats and a sample export let root = super::resolve_root(".")?; diff --git a/src/cli/mod.rs b/src/cli/mod.rs index 60f3204..cafd4aa 100644 --- a/src/cli/mod.rs +++ b/src/cli/mod.rs @@ -98,6 +98,10 @@ pub enum Commands { /// Re-index all files, bypassing mtime comparison #[arg(short, long)] force: bool, + + /// Suppress progress bars — print only the final summary line + #[arg(short = 'q', long)] + quiet: bool, }, /// Check the index is current (CI-friendly, exit 1 if stale) diff --git a/src/cli/sidecar.rs b/src/cli/sidecar.rs index d438093..fafaee7 100644 --- a/src/cli/sidecar.rs +++ b/src/cli/sidecar.rs @@ -1,8 +1,11 @@ use anyhow::Result; use chrono::Utc; use colored::Colorize; +use indicatif::ParallelProgressIterator; +use indicatif::{ProgressBar, ProgressStyle}; use rayon::prelude::*; use rusqlite::params; +use std::time::{Duration, Instant}; use crate::config::Config; use crate::db; @@ -11,11 +14,33 @@ use crate::resolver; use super::{collect_files_multi, resolve_root_multi}; -pub fn generate(paths: &[String], dry_run: bool, force: bool) -> Result<()> { +/// Show progress bars when at least this many files need processing. +const PROGRESS_THRESHOLD: usize = 10; + +pub fn generate(paths: &[String], dry_run: bool, force: bool, quiet: bool) -> Result<()> { + let total_start = Instant::now(); let config = Config::load().unwrap_or_default(); + + // Scan phase: spinner while walking the directory tree. 
+ let scan_sp = if !quiet { + let sp = ProgressBar::new_spinner(); + sp.set_style( + ProgressStyle::with_template("{spinner:.blue} Scanning files...") + .expect("valid template"), + ); + sp.enable_steady_tick(Duration::from_millis(80)); + Some(sp) + } else { + None + }; + let files = collect_files_multi(paths, &config)?; let root = resolve_root_multi(paths)?; + if let Some(sp) = &scan_sp { + sp.finish_and_clear(); + } + if files.is_empty() { println!("{} No supported source files found", "!".yellow()); println!( @@ -35,10 +60,6 @@ pub fn generate(paths: &[String], dry_run: bool, force: bool) -> Result<()> { return Ok(()); } - println!("Found {} files to process", files.len()); - - let processor = FileProcessor::new(&root); - if dry_run { // Dry run: show what would be indexed without touching the DB. let dirty_files: Vec<&std::path::PathBuf> = if let Ok(conn) = db::open_db(&root) { @@ -85,10 +106,17 @@ pub fn generate(paths: &[String], dry_run: bool, force: bool) -> Result<()> { let workspace_info = resolver::workspace::discover(&root); db::writer::upsert_workspace_packages(&conn, &workspace_info.packages)?; - // Phase 1 (sequential): determine which files are stale in the DB. - // mtime comparison is O(1) per file and fast even at 4,673 files. + // Phase 1: bulk staleness check. + // Load all indexed_at times in one query (avoids 39k individual SELECTs), + // then compare in parallel with rayon (mtime syscalls are I/O-parallel). + let phase1_start = Instant::now(); + let indexed_mtimes: std::collections::HashMap<String, String> = if !force { + db::writer::load_indexed_mtimes(&conn)? 
+ } else { + std::collections::HashMap::new() + }; let dirty_files: Vec<&std::path::PathBuf> = files - .iter() + .par_iter() .filter(|file| { if force { return true; @@ -98,15 +126,55 @@ pub fn generate(paths: &[String], dry_run: bool, force: bool) -> Result<()> { .unwrap_or(file) .display() .to_string(); - let mtime = db::writer::file_mtime_rfc3339(file); - !db::writer::is_file_up_to_date(&conn, &rel, mtime.as_deref()) + let Some(mtime) = db::writer::file_mtime_rfc3339(file) else { + return true; // unreadable mtime → treat as dirty + }; + // Dirty when not in DB, or stored indexed_at < file mtime. + indexed_mtimes + .get(&rel) + .map(|indexed_at| indexed_at.as_str() < mtime.as_str()) + .unwrap_or(true) }) .collect(); + let phase1_elapsed = phase1_start.elapsed(); + + if dirty_files.is_empty() { + let elapsed = total_start.elapsed(); + println!( + "Found {} files · all up to date ({:.1}s)", + files.len(), + elapsed.as_secs_f64() + ); + db::writer::write_meta(&conn, "fmm_version", env!("CARGO_PKG_VERSION"))?; + db::writer::write_meta(&conn, "generated_at", &Utc::now().to_rfc3339())?; + return Ok(()); + } - if !dirty_files.is_empty() { - // Phase 2 (parallel): parse all stale files. - let parse_results: Vec<(std::path::PathBuf, crate::parser::ParseResult)> = dirty_files + let show_progress = !quiet && dirty_files.len() >= PROGRESS_THRESHOLD; + + if !quiet { + println!( + "Found {} files · {} changed", + files.len(), + dirty_files.len() + ); + } + + let processor = FileProcessor::new(&root); + + // Phase 2 (parallel): parse all stale files. 
+ let phase2_start = Instant::now(); + let parse_results: Vec<(std::path::PathBuf, crate::parser::ParseResult)> = if show_progress { + let pb = ProgressBar::new(dirty_files.len() as u64); + pb.set_style( + ProgressStyle::with_template( + "Parsing {wide_bar:.cyan/blue} {pos}/{len} {per_sec} ETA {eta}", + ) + .expect("valid template"), + ); + let results = dirty_files .par_iter() + .progress_with(pb.clone()) .filter_map(|file| match processor.parse(file) { Ok(result) => Some(((*file).clone(), result)), Err(e) => { @@ -115,45 +183,118 @@ pub fn generate(paths: &[String], dry_run: bool, force: bool) -> Result<()> { } }) .collect(); - - // Phase 3 (transacted): write all parsed results to DB in one commit. - { - let tx = conn.transaction()?; - for (abs_path, result) in &parse_results { - let rel = abs_path - .strip_prefix(&root) - .unwrap_or(abs_path) - .display() - .to_string(); - let mtime = db::writer::file_mtime_rfc3339(abs_path); - db::writer::upsert_file_data(&tx, &rel, result, mtime.as_deref())?; + pb.finish_and_clear(); + results + } else { + dirty_files + .par_iter() + .filter_map(|file| match processor.parse(file) { + Ok(result) => Some(((*file).clone(), result)), + Err(e) => { + eprintln!("{} {}: {}", "error:".red().bold(), file.display(), e); + None + } + }) + .collect() + }; + let phase2_elapsed = phase2_start.elapsed(); + + // Phase 2b (parallel): pre-serialize JSON fields for all parsed files. + // serde_json::to_string is CPU-bound — rayon cuts this from O(N) serial to + // O(N/cores) before we enter the single-threaded SQLite transaction. 
+ let phase2b_start = Instant::now(); + let serialized_rows: Vec<db::writer::PreserializedRow> = parse_results + .par_iter() + .filter_map(|(abs_path, result)| { + let rel = abs_path + .strip_prefix(&root) + .unwrap_or(abs_path) + .display() + .to_string(); + let mtime = db::writer::file_mtime_rfc3339(abs_path); + match db::writer::serialize_file_data(&rel, result, mtime.as_deref()) { + Ok(row) => Some(row), + Err(e) => { + eprintln!( + "{} serialize {}: {}", + "error:".red().bold(), + abs_path.display(), + e + ); + None + } + } + }) + .collect(); + let phase2b_elapsed = phase2b_start.elapsed(); + + // Phase 3 (transacted): write pre-serialized rows to DB in one commit. + // JSON serialization already done in parallel — this loop is pure SQLite I/O. + let phase3_start = Instant::now(); + { + let tx = conn.transaction()?; + if show_progress { + let pb = ProgressBar::new(serialized_rows.len() as u64); + pb.set_style( + ProgressStyle::with_template("Writing {wide_bar:.green/blue} {pos}/{len}") + .expect("valid template"), + ); + for row in &serialized_rows { + db::writer::upsert_preserialized(&tx, row)?; + pb.inc(1); + } + pb.finish_and_clear(); + } else { + for row in &serialized_rows { + db::writer::upsert_preserialized(&tx, row)?; } - tx.commit()?; - } - - // Phase 4: rebuild the pre-computed reverse dependency graph. - db::writer::rebuild_and_write_reverse_deps(&mut conn, &root)?; - - for abs_path in &dirty_files { - let rel = abs_path.strip_prefix(&root).unwrap_or(abs_path); - println!("{} {}", "✓".green(), rel.display()); } - println!( - "\n{} {} file(s) indexed", - "✓".green().bold(), - dirty_files.len() - ); - println!( - "\n {} Run 'fmm validate' to verify, or 'fmm search --export <name>' to find symbols", - "next:".cyan() 
+ let phase4_start = Instant::now(); + if show_progress { + let sp = ProgressBar::new_spinner(); + sp.set_style( + ProgressStyle::with_template("{spinner:.blue} Building dependency graph...") + .expect("valid template"), ); + sp.enable_steady_tick(Duration::from_millis(80)); + db::writer::rebuild_and_write_reverse_deps(&mut conn, &root)?; + sp.finish_and_clear(); } else { - println!("{} All files up to date", "✓".green()); + db::writer::rebuild_and_write_reverse_deps(&mut conn, &root)?; } + let phase4_elapsed = phase4_start.elapsed(); db::writer::write_meta(&conn, "fmm_version", env!("CARGO_PKG_VERSION"))?; db::writer::write_meta(&conn, "generated_at", &Utc::now().to_rfc3339())?; + let total_elapsed = total_start.elapsed(); + + println!( + "{} {} file(s) indexed in {:.1}s", + "Done ✓".green().bold(), + serialized_rows.len(), + total_elapsed.as_secs_f64() + ); + + if !quiet { + let accounted = + phase1_elapsed + phase2_elapsed + phase2b_elapsed + phase3_elapsed + phase4_elapsed; + let other = total_elapsed.saturating_sub(accounted); + println!( + " parse: {:.1}s · serialize: {:.1}s · write: {:.1}s · deps: {:.1}s · other: {:.1}s", + phase2_elapsed.as_secs_f64(), + phase2b_elapsed.as_secs_f64(), + phase3_elapsed.as_secs_f64(), + phase4_elapsed.as_secs_f64(), + other.as_secs_f64(), + ); + } + Ok(()) } diff --git a/src/cli/watch.rs b/src/cli/watch.rs index 369e7d1..2a6e476 100644 --- a/src/cli/watch.rs +++ b/src/cli/watch.rs @@ -22,7 +22,7 @@ pub fn watch(path: &str, debounce_ms: u64) -> Result<()> { // Initial generate pass println!("{}", "Running initial generate pass...".green().bold()); - super::generate(&[path.to_string()], false, false)?; + super::generate(&[path.to_string()], false, false, false)?; let file_count = collect_files(path, &config)?.len(); println!("\nWatching {} files in {} ...\n", file_count, path); diff --git a/src/db/mod.rs b/src/db/mod.rs index 5bcbf38..0751bd6 100644 --- a/src/db/mod.rs +++ b/src/db/mod.rs @@ -6,7 +6,7 @@ use 
rusqlite::Connection; use std::path::Path; pub const DB_FILENAME: &str = ".fmm.db"; -const SCHEMA_VERSION: u32 = 1; +const SCHEMA_VERSION: u32 = 2; /// Opens or creates the fmm SQLite database at `root/.fmm.db`. /// @@ -156,13 +156,15 @@ CREATE TABLE IF NOT EXISTS exports ( CREATE INDEX IF NOT EXISTS idx_exports_name ON exports(name); CREATE INDEX IF NOT EXISTS idx_exports_file ON exports(file_path); --- Class/interface methods for dotted-name lookups (e.g. 'MyClass.doThing'). --- Replaces the in-memory method_index. +-- Class/interface methods and nested function symbols for dotted-name lookups. +-- kind: NULL = class method, 'nested-fn' = depth-1 nested function (ALP-922), +-- 'closure-state' = depth-1 non-trivial prologue var (ALP-922). CREATE TABLE IF NOT EXISTS methods ( dotted_name TEXT NOT NULL, file_path TEXT NOT NULL REFERENCES files(path) ON DELETE CASCADE, start_line INTEGER, end_line INTEGER, + kind TEXT, PRIMARY KEY (dotted_name, file_path) ); CREATE INDEX IF NOT EXISTS idx_methods_name ON methods(dotted_name); diff --git a/src/db/reader.rs b/src/db/reader.rs index da84f0c..e291644 100644 --- a/src/db/reader.rs +++ b/src/db/reader.rs @@ -78,6 +78,7 @@ fn load_files(conn: &Connection, manifest: &mut Manifest) -> Result<()> { function_names, named_imports, namespace_imports, + ..Default::default() }, ); } @@ -214,7 +215,7 @@ fn load_exports(conn: &Connection, manifest: &mut Manifest) -> Result<()> { fn load_methods(conn: &Connection, manifest: &mut Manifest) -> Result<()> { let mut stmt = - conn.prepare("SELECT dotted_name, file_path, start_line, end_line FROM methods")?; + conn.prepare("SELECT dotted_name, file_path, start_line, end_line, kind FROM methods")?; let rows = stmt.query_map([], |row| { Ok(( @@ -222,11 +223,12 @@ fn load_methods(conn: &Connection, manifest: &mut Manifest) -> Result<()> { row.get::<_, String>(1)?, row.get::<_, Option<i64>>(2)?, row.get::<_, Option<i64>>(3)?, + row.get::<_, Option<String>>(4)?, )) })?; for row in rows { - let (dotted_name, 
file_path, start, end) = row?; + let (dotted_name, file_path, start, end, kind) = row?; let lines = match (start, end) { (Some(s), Some(e)) if s > 0 => Some(ExportLines { start: s as usize, @@ -235,15 +237,27 @@ fn load_methods(conn: &Connection, manifest: &mut Manifest) -> Result<()> { _ => None, }; - // Populate FileEntry.methods so the read_symbol tool can build redirect hints - // for large classes without re-querying the DB. + let el = lines.clone().unwrap_or(ExportLines { start: 0, end: 0 }); + + // Route into the correct FileEntry bucket based on kind. if let Some(fe) = manifest.files.get_mut(&file_path) { - fe.methods.get_or_insert_with(HashMap::new).insert( - dotted_name.clone(), - lines.clone().unwrap_or(ExportLines { start: 0, end: 0 }), - ); + match kind.as_deref() { + Some("nested-fn") => { + fe.nested_fns.insert(dotted_name.clone(), el); + } + Some("closure-state") => { + fe.closure_state.insert(dotted_name.clone(), el); + } + _ => { + // NULL kind = class method + fe.methods + .get_or_insert_with(HashMap::new) + .insert(dotted_name.clone(), el); + } + } } + // All kinds go into method_index so fmm_read_symbol("Parent.child") works. manifest.method_index.insert( dotted_name, ExportLocation { diff --git a/src/db/writer.rs b/src/db/writer.rs index a2b886a..5561433 100644 --- a/src/db/writer.rs +++ b/src/db/writer.rs @@ -41,6 +41,170 @@ pub fn is_file_up_to_date(conn: &Connection, rel_path: &str, source_mtime: Optio .unwrap_or(false) } +/// Load all `(path, indexed_at)` pairs from the DB in one query. +/// +/// Used by the bulk staleness check in `fmm generate` to avoid 39k individual +/// queries. The returned map is keyed by relative file path. +pub fn load_indexed_mtimes(conn: &Connection) -> Result<std::collections::HashMap<String, String>> { + let mut stmt = conn.prepare("SELECT path, indexed_at FROM files")?; + let map = stmt + .query_map([], |row| { + Ok((row.get::<_, String>(0)?, row.get::<_, String>(1)?)) + })? 
+ .filter_map(|r| r.ok()) + .collect(); + Ok(map) +} + +/// All data needed to write one file to the DB, with JSON fields pre-serialized. +/// +/// Computing JSON strings is CPU-bound and can be done in parallel (rayon) +/// before the single-threaded SQLite transaction in Phase 3. +pub struct PreserializedRow { + pub rel_path: String, + pub loc: i64, + pub mtime: Option<String>, + pub imports_json: String, + pub deps_json: String, + pub named_imports_json: String, + pub namespace_imports_json: String, + pub function_names_json: String, + pub indexed_at: String, + pub exports: Vec<ExportRecord>, + pub methods: Vec<MethodRecord>, +} + +/// A flattened export entry ready for direct DB insertion. +pub struct ExportRecord { + pub name: String, + pub start_line: i64, + pub end_line: i64, +} + +/// A flattened method entry ready for direct DB insertion. +pub struct MethodRecord { + pub dotted_name: String, + pub start_line: i64, + pub end_line: i64, + /// ALP-922: NULL = class method, "nested-fn", "closure-state". + pub kind: Option<String>, +} + +/// Serialize all JSON fields for a parsed file — CPU-bound work safe to run in rayon. +/// +/// Call this in parallel across dirty files, then pass the results to +/// `upsert_preserialized` inside the single-threaded SQLite transaction. 
+pub fn serialize_file_data( + rel_path: &str, + result: &ParseResult, + mtime: Option<&str>, +) -> Result<PreserializedRow> { + let meta = &result.metadata; + let function_names = extract_function_names(result.custom_fields.as_ref()); + + let exports: Vec<ExportRecord> = meta + .exports + .iter() + .filter(|e| e.parent_class.is_none()) + .map(|e| ExportRecord { + name: e.name.clone(), + start_line: e.start_line as i64, + end_line: e.end_line as i64, + }) + .collect(); + + let mut seen = std::collections::HashSet::new(); + let methods: Vec<MethodRecord> = meta + .exports + .iter() + .filter_map(|e| { + e.parent_class.as_ref().and_then(|class| { + let key = format!("{}.{}", class, e.name); + if seen.insert(key.clone()) { + Some(MethodRecord { + dotted_name: key, + start_line: e.start_line as i64, + end_line: e.end_line as i64, + kind: e.kind.clone(), + }) + } else { + None + } + }) + }) + .collect(); + + Ok(PreserializedRow { + rel_path: rel_path.to_string(), + loc: meta.loc as i64, + mtime: mtime.map(String::from), + imports_json: serde_json::to_string(&meta.imports).context("serialize imports")?, + deps_json: serde_json::to_string(&meta.dependencies).context("serialize dependencies")?, + named_imports_json: serde_json::to_string(&meta.named_imports) + .context("serialize named_imports")?, + namespace_imports_json: serde_json::to_string(&meta.namespace_imports) + .context("serialize namespace_imports")?, + function_names_json: serde_json::to_string(&function_names) + .context("serialize function_names")?, + indexed_at: Utc::now().to_rfc3339(), + exports, + methods, + }) +} + +/// Write a pre-serialized file row to the DB within an open transaction. +/// +/// Unlike `upsert_file_data`, this takes already-serialized JSON strings so +/// the CPU-bound serialization work can be done outside the transaction. 
+pub fn upsert_preserialized(tx: &Transaction<'_>, row: &PreserializedRow) -> Result<()> { + tx.execute( + "INSERT OR REPLACE INTO files + (path, loc, modified, imports, dependencies, named_imports, + namespace_imports, function_names, indexed_at) + VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9)", + params![ + row.rel_path, + row.loc, + row.mtime, + row.imports_json, + row.deps_json, + row.named_imports_json, + row.namespace_imports_json, + row.function_names_json, + row.indexed_at, + ], + ) + .context("Failed to upsert file row")?; + + { + let mut stmt = tx.prepare_cached( + "INSERT OR REPLACE INTO exports (name, file_path, start_line, end_line) + VALUES (?1, ?2, ?3, ?4)", + )?; + for e in &row.exports { + stmt.execute(params![e.name, row.rel_path, e.start_line, e.end_line])?; + } + } + + { + let mut stmt = tx.prepare_cached( + "INSERT OR REPLACE INTO methods (dotted_name, file_path, start_line, end_line, kind) + VALUES (?1, ?2, ?3, ?4, ?5)", + )?; + for m in &row.methods { + stmt.execute(params![ + m.dotted_name, + row.rel_path, + m.start_line, + m.end_line, + m.kind, + ])?; + } + } + + Ok(()) +} + /// Insert or replace a complete file record plus its exports and methods. /// /// Because the `files` table uses `INSERT OR REPLACE` with a PRIMARY KEY @@ -100,8 +264,8 @@ pub fn upsert_file_data( // dotted name for each signature, deduplicated the same way as the YAML formatter). 
{ let mut stmt = tx.prepare_cached( - "INSERT OR REPLACE INTO methods (dotted_name, file_path, start_line, end_line) - VALUES (?1, ?2, ?3, ?4)", + "INSERT OR REPLACE INTO methods (dotted_name, file_path, start_line, end_line, kind) + VALUES (?1, ?2, ?3, ?4, ?5)", )?; let mut seen = std::collections::HashSet::new(); for entry in &meta.exports { @@ -113,6 +277,7 @@ pub fn upsert_file_data( rel_path, entry.start_line as i64, entry.end_line as i64, + entry.kind, ])?; } } @@ -186,6 +351,7 @@ pub fn load_files_map(conn: &Connection) -> Result> { function_names, named_imports, namespace_imports, + ..Default::default() }, ); } diff --git a/src/format/yaml_formatters.rs b/src/format/yaml_formatters.rs index 6685373..a429fbc 100644 --- a/src/format/yaml_formatters.rs +++ b/src/format/yaml_formatters.rs @@ -29,12 +29,13 @@ pub fn format_file_outline( if !entry.exports.is_empty() { lines.push("symbols:".to_string()); for (i, name) in entry.exports.iter().enumerate() { + let prefix = format!("{}.", name); + // Collect public methods belonging to this class (prefix "ClassName.") let class_methods: Vec<_> = entry .methods .as_ref() .map(|m| { - let prefix = format!("{}.", name); let mut v: Vec<_> = m .iter() .filter(|(k, _)| k.starts_with(&prefix)) @@ -49,6 +50,29 @@ pub fn format_file_outline( }) .unwrap_or_default(); + // ALP-922: nested function declarations (depth-1) under this function + let mut nested_fn_list: Vec<_> = entry + .nested_fns + .iter() + .filter(|(k, _)| k.starts_with(&prefix)) + .map(|(k, v)| (k.trim_start_matches(&prefix).to_string(), v)) + .collect(); + nested_fn_list.sort_by_key(|(_, el)| el.start); + + // ALP-922: closure-state vars — only when include_private requested + let include_private_flag = private_by_class.is_some(); + let mut closure_state_list: Vec<_> = if include_private_flag { + entry + .closure_state + .iter() + .filter(|(k, _)| k.starts_with(&prefix)) + .map(|(k, v)| (k.trim_start_matches(&prefix).to_string(), v)) + .collect() + } else { + 
Vec::new() + }; + closure_state_list.sort_by_key(|(_, el)| el.start); + // Private members for this class (only when include_private requested) let private_members: &[PrivateMember] = private_by_class .and_then(|m| m.get(name.as_str())) @@ -58,114 +82,123 @@ pub fn format_file_outline( if let Some(el) = entry.export_lines.as_ref().and_then(|els| els.get(i)) { let size = el.end.saturating_sub(el.start) + 1; let private_count = private_members.len(); + let nested_fn_count = nested_fn_list.len(); + let closure_state_count = closure_state_list.len(); - match (class_methods.is_empty(), private_count) { - (true, 0) => { - lines.push(format!( - " {}: [{}, {}] # {} lines", - yaml_escape(name), - el.start, - el.end, - size - )); + // Build annotation: summarize what sub-entries are present. + let mut annotation_parts: Vec<String> = Vec::new(); + if !class_methods.is_empty() { + annotation_parts.push(format!("{} public methods", class_methods.len())); + } + if nested_fn_count > 0 { + annotation_parts.push(format!("{} nested functions", nested_fn_count)); + } + if private_count > 0 { + let pm_count = private_members.iter().filter(|m| m.is_method).count(); + let pf_count = private_count - pm_count; + if pm_count > 0 { + annotation_parts.push(format!("{} private methods", pm_count)); } - (false, 0) => { - lines.push(format!( - " {}: [{}, {}] # {} lines, {} public methods", - yaml_escape(name), - el.start, - el.end, - size, - class_methods.len() - )); - for (method_name, method_lines) in &class_methods { - lines.push(format!( - " {}: [{}, {}]", - yaml_escape(method_name), - method_lines.start, - method_lines.end - )); - } + if pf_count > 0 { + annotation_parts.push(format!("{} private fields", pf_count)); } - (true, _) => { - lines.push(format!( - " {}: [{}, {}] # {} lines, {} private members", - yaml_escape(name), - el.start, - el.end, - size, - private_count + } + if include_private_flag && closure_state_count > 0 { + annotation_parts.push(format!("{} closure-state", 
closure_state_count)); + } + + let annotation = if annotation_parts.is_empty() { + format!( + " {}: [{}, {}] # {} lines", + yaml_escape(name), + el.start, + el.end, + size + ) + } else { + format!( + " {}: [{}, {}] # {} lines, {}", + yaml_escape(name), + el.start, + el.end, + size, + annotation_parts.join(", ") + ) + }; + lines.push(annotation); + + // Sub-entries: build combined list sorted by start line. + // (start, short_name, end, suffix) + let mut sub_entries: Vec<(usize, String, usize, &'static str)> = Vec::new(); + + // Determine whether interleaving by start line is needed: + // only when private or nested items are present alongside class methods. + let needs_start_sort = !private_members.is_empty() + || !nested_fn_list.is_empty() + || !closure_state_list.is_empty(); + + if needs_start_sort { + // Mixed sub-entries: sort class methods by start line for interleaving. + let mut public_sorted = class_methods.clone(); + public_sorted.sort_by_key(|(_, el)| el.start); + for (method_name, method_lines) in &public_sorted { + sub_entries.push(( + method_lines.start, + method_name.clone(), + method_lines.end, + "", )); - for pm in private_members { - let suffix = if pm.is_method { - " # private" - } else { - " # private field" - }; - lines.push(format!( - " {}: [{}, {}]{}", - yaml_escape(&pm.name), - pm.start, - pm.end, - suffix - )); - } } - (false, _) => { - let private_method_count = - private_members.iter().filter(|m| m.is_method).count(); - let private_field_count = private_count - private_method_count; - let mut summary = format!( - " {}: [{}, {}] # {} lines, {} public methods, {} private methods", - yaml_escape(name), - el.start, - el.end, - size, - class_methods.len(), - private_method_count - ); - if private_field_count > 0 { - summary.push_str(&format!(", {} private fields", private_field_count)); - } - lines.push(summary); - - // Merge public (by start line) and private, interleaved by line number. 
- // Public methods are sorted by size desc by the collector above; re-sort - // by start line for interleaved display. - let mut public_sorted = class_methods.clone(); - public_sorted.sort_by_key(|(_, el)| el.start); - - // Build a combined list of (start, label, end, suffix) - let mut combined: Vec<(usize, String, usize, &str)> = Vec::new(); - for (method_name, method_lines) in &public_sorted { - combined.push(( - method_lines.start, - method_name.clone(), - method_lines.end, - "", - )); - } - for pm in private_members { - let suffix = if pm.is_method { - " # private" - } else { - " # private field" - }; - combined.push((pm.start, pm.name.clone(), pm.end, suffix)); - } - combined.sort_by_key(|(start, _, _, _)| *start); - - for (start, method_name, end, suffix) in &combined { - lines.push(format!( - " {}: [{}, {}]{}", - yaml_escape(method_name), - start, - end, - suffix - )); - } + } else { + // Class methods only: preserve size-descending order (original behaviour). + for (method_name, method_lines) in &class_methods { + sub_entries.push(( + method_lines.start, + method_name.clone(), + method_lines.end, + "", + )); } } + + // Nested functions + for (fn_name, fn_lines) in &nested_fn_list { + sub_entries.push((fn_lines.start, fn_name.clone(), fn_lines.end, "")); + } + + // Private class members + for pm in private_members { + let suffix = if pm.is_method { + " # private" + } else { + " # private field" + }; + sub_entries.push((pm.start, pm.name.clone(), pm.end, suffix)); + } + + // Closure-state vars (only with include_private) + for (var_name, var_lines) in &closure_state_list { + sub_entries.push(( + var_lines.start, + var_name.clone(), + var_lines.end, + " # closure-state", + )); + } + + if needs_start_sort { + sub_entries.sort_by_key(|(start, _, _, _)| *start); + } + + for (start, sub_name, end, suffix) in &sub_entries { + lines.push(format!( + " {}: [{}, {}]{}", + yaml_escape(sub_name), + start, + end, + suffix + )); + } } else { lines.push(format!(" {}", 
yaml_escape(name))); } } diff --git a/src/main.rs b/src/main.rs index 07a8b8e..bdc8ff9 100644 --- a/src/main.rs +++ b/src/main.rs @@ -71,9 +71,9 @@ fn run() -> anyhow::Result<()> { paths, dry_run, force, + quiet, } => { - println!("{}", "Indexing source files...".green().bold()); - cli::generate(&paths, dry_run, force)?; + cli::generate(&paths, dry_run, force, quiet)?; } Commands::Validate { paths } => { println!("{}", "Validating index...".green().bold()); diff --git a/src/manifest/mod.rs b/src/manifest/mod.rs index 21b049f..4de39bf 100644 --- a/src/manifest/mod.rs +++ b/src/manifest/mod.rs @@ -56,6 +56,16 @@ pub struct FileEntry { /// Source paths of namespace imports and wildcard re-exports. Populated from sidecar. #[serde(skip)] pub namespace_imports: Vec<String>, + /// ALP-922: depth-1 nested function declarations inside function bodies. + /// dotted_name (e.g. "createTypeChecker.getIndexType") -> line range. + /// Always shown in fmm_file_outline. Searchable via fmm_search. + #[serde(skip)] + pub nested_fns: HashMap<String, ExportLines>, + /// ALP-922: depth-1 non-trivial prologue var/const/let declarations. + /// dotted_name (e.g. "createTypeChecker.silentNeverType") -> line range. + /// Shown only when include_private: true in fmm_file_outline. 
+ #[serde(skip)] + pub closure_state: HashMap<String, ExportLines>, } impl From<FileMetadata> for FileEntry { @@ -63,17 +73,27 @@ impl From<FileMetadata> for FileEntry { let mut exports = Vec::new(); let mut export_lines = Vec::new(); let mut methods: HashMap<String, ExportLines> = HashMap::new(); + let mut nested_fns: HashMap<String, ExportLines> = HashMap::new(); + let mut closure_state: HashMap<String, ExportLines> = HashMap::new(); for e in &metadata.exports { - if let Some(ref class) = e.parent_class { - let key = format!("{}.{}", class, e.name); - methods.insert( - key, - ExportLines { - start: e.start_line, - end: e.end_line, - }, - ); + if let Some(ref parent) = e.parent_class { + let key = format!("{}.{}", parent, e.name); + let el = ExportLines { + start: e.start_line, + end: e.end_line, + }; + match e.kind.as_deref() { + Some("nested-fn") => { + nested_fns.insert(key, el); + } + Some("closure-state") => { + closure_state.insert(key, el); + } + _ => { + methods.insert(key, el); + } + } } else { exports.push(e.name.clone()); export_lines.push(ExportLines { start: @@ -99,6 +119,8 @@ impl From<FileMetadata> for FileEntry { function_names: Vec::new(), named_imports: metadata.named_imports, namespace_imports: metadata.namespace_imports, + nested_fns, + closure_state, } } } @@ -206,12 +228,18 @@ impl Manifest { self.export_all.remove(old_export); } } - // Remove old method entries for this file + // Remove old method/nested-fn/closure-state entries for this file if let Some(ref old_methods) = old_entry.methods { for key in old_methods.keys() { self.method_index.remove(key); } } + for key in old_entry.nested_fns.keys() { + self.method_index.remove(key); + } + for key in old_entry.closure_state.keys() { + self.method_index.remove(key); + } } for export_entry in &metadata.exports { @@ -317,6 +345,12 @@ impl Manifest { self.method_index.remove(key); } } + for key in entry.nested_fns.keys() { + self.method_index.remove(key); + } + for key in entry.closure_state.keys() { + self.method_index.remove(key); + } } } diff --git a/src/mcp/tests.rs b/src/mcp/tests.rs index 79d33ae..21ca286 100644 --- 
a/src/mcp/tests.rs +++ b/src/mcp/tests.rs @@ -1927,6 +1927,7 @@ fn glossary_layer2_filters_non_symbol_importers() { function_names: vec!["myFunc".to_string()], named_imports: HashMap::new(), namespace_imports: vec![], + ..Default::default() }; let mut caller_named = HashMap::new(); @@ -1942,6 +1943,7 @@ fn glossary_layer2_filters_non_symbol_importers() { function_names: vec![], named_imports: caller_named, namespace_imports: vec![], + ..Default::default() }; let mut bystander_named = HashMap::new(); @@ -1958,6 +1960,7 @@ fn glossary_layer2_filters_non_symbol_importers() { function_names: vec![], named_imports: bystander_named, namespace_imports: vec![], + ..Default::default() }; manifest.files.insert("source.js".to_string(), source_entry); diff --git a/src/parser/builtin/typescript.rs b/src/parser/builtin/typescript.rs index 053b0be..3491d9f 100644 --- a/src/parser/builtin/typescript.rs +++ b/src/parser/builtin/typescript.rs @@ -631,6 +631,150 @@ impl TypeScriptParser { } } +impl TypeScriptParser { + /// ALP-922: Extract depth-1 nested function declarations and prologue variables + /// from all top-level function bodies. Only processes JS/TS function_declaration nodes + /// (exported or bare). Arrow functions assigned to variables are skipped — they have + /// no named nested declarations in practice. 
+ fn extract_nested_symbols( + source: &str, + root_node: tree_sitter::Node, + ) -> Vec { + let source_bytes = source.as_bytes(); + let mut entries = Vec::new(); + + for i in 0..root_node.child_count() { + let child = match root_node.child(i as u32) { + Some(c) => c, + None => continue, + }; + + let fn_node = match child.kind() { + "function_declaration" => Some(child), + "export_statement" => { + // exported function_declaration is typically the second child + let mut found = None; + for j in 0..child.child_count() { + if let Some(c) = child.child(j as u32) { + if c.kind() == "function_declaration" { + found = Some(c); + break; + } + } + } + found + } + _ => None, + }; + + let fn_node = match fn_node { + Some(n) => n, + None => continue, + }; + + let fn_name = match fn_node + .child_by_field_name("name") + .and_then(|n| n.utf8_text(source_bytes).ok()) + { + Some(n) => n.to_string(), + None => continue, + }; + + let body = match fn_node.child_by_field_name("body") { + Some(b) => b, + None => continue, + }; + + let mut first_nested_fn_seen = false; + + for j in 0..body.child_count() { + let stmt = match body.child(j as u32) { + Some(s) => s, + None => continue, + }; + + match stmt.kind() { + "function_declaration" => { + first_nested_fn_seen = true; + let nested_name = match stmt + .child_by_field_name("name") + .and_then(|n| n.utf8_text(source_bytes).ok()) + { + Some(n) => n.to_string(), + None => continue, + }; + entries.push(crate::parser::ExportEntry::nested_fn( + nested_name, + stmt.start_position().row + 1, + stmt.end_position().row + 1, + fn_name.clone(), + )); + } + "lexical_declaration" | "variable_declaration" if !first_nested_fn_seen => { + // Prologue: extract individual declarators that are non-trivial + for k in 0..stmt.child_count() { + let decl = match stmt.child(k as u32) { + Some(d) if d.kind() == "variable_declarator" => d, + _ => continue, + }; + let var_name = match decl + .child_by_field_name("name") + .and_then(|n| 
n.utf8_text(source_bytes).ok()) + { + Some(n) => n.to_string(), + None => continue, + }; + if Self::is_non_trivial_declarator(decl) { + entries.push(crate::parser::ExportEntry::closure_state( + var_name, + decl.start_position().row + 1, + decl.end_position().row + 1, + fn_name.clone(), + )); + } + } + } + _ => {} + } + } + } + + entries + } + + /// Return true when a variable_declarator is worth indexing as closure-state: + /// it has a type annotation, or its value starts with a call expression. + fn is_non_trivial_declarator(decl: tree_sitter::Node) -> bool { + // Check for type_annotation child + for i in 0..decl.child_count() { + if let Some(child) = decl.child(i as u32) { + if child.kind() == "type_annotation" { + return true; + } + } + } + // Check value for call_expression (or as_expression wrapping one) + if let Some(value) = decl.child_by_field_name("value") { + if value.kind() == "call_expression" { + return true; + } + // Handle `foo() as Type` (as_expression) or `new Foo()` (new_expression) + if value.kind() == "as_expression" || value.kind() == "new_expression" { + return true; + } + // One level deeper: `(call())` — parenthesized expression + for i in 0..value.child_count() { + if let Some(child) = value.child(i as u32) { + if child.kind() == "call_expression" || child.kind() == "new_expression" { + return true; + } + } + } + } + false + } +} + impl Parser for TypeScriptParser { fn parse(&mut self, source: &str) -> Result { self.parse_with_aliases(source, &HashMap::new()) @@ -687,6 +831,11 @@ impl TypeScriptParser { .collect(); let methods = self.extract_class_methods(source, root_node, &exported_classes); exports.extend(methods); + + // ALP-922: extract depth-1 nested function declarations and prologue vars + let nested = Self::extract_nested_symbols(source, root_node); + exports.extend(nested); + exports.sort_by_key(|e| e.start_line); let decorators = self.extract_decorators(source, root_node); @@ -1680,4 +1829,167 @@ import { c } from './mod-b'; ); 
assert_eq!(match_alias("@nestjs/common", "@/*", &targets), None); } + + // --- ALP-922: Nested symbol extraction --- + + #[test] + fn nested_fn_extracted_from_exported_function() { + let source = r#" +export function createTypeChecker(host: any): any { + var silentNeverType = createIntrinsicType(TypeFlags.Never, "never"); + function getIndexType(type: any): any { return undefined; } + function getReturnType(sig: any): any { return undefined; } + return {}; +} +"#; + let result = parse(source); + let nested: Vec<_> = result + .metadata + .exports + .iter() + .filter(|e| e.parent_class.as_deref() == Some("createTypeChecker")) + .collect(); + let names: Vec<&str> = nested.iter().map(|e| e.name.as_str()).collect(); + assert!( + names.contains(&"getIndexType"), + "getIndexType missing; names={:?}", + names + ); + assert!( + names.contains(&"getReturnType"), + "getReturnType missing; names={:?}", + names + ); + // silentNeverType is closure-state (call expression initializer) + assert!( + names.contains(&"silentNeverType"), + "silentNeverType missing; names={:?}", + names + ); + } + + #[test] + fn nested_fn_has_correct_kind() { + let source = r#" +export function outer(): void { + var state = createState(); + function inner(): void {} +} +"#; + let result = parse(source); + let inner_entry = result + .metadata + .exports + .iter() + .find(|e| e.name == "inner") + .expect("inner not found"); + assert_eq!(inner_entry.kind.as_deref(), Some("nested-fn")); + assert_eq!(inner_entry.parent_class.as_deref(), Some("outer")); + + let state_entry = result + .metadata + .exports + .iter() + .find(|e| e.name == "state") + .expect("state (closure-state) not found"); + assert_eq!(state_entry.kind.as_deref(), Some("closure-state")); + } + + #[test] + fn trivial_var_not_extracted_as_closure_state() { + let source = r#" +export function outer(): void { + let counter = 0; + var flag = false; + function inner(): void {} +} +"#; + let result = parse(source); + let names: Vec<&str> = result + 
.metadata + .exports + .iter() + .map(|e| e.name.as_str()) + .collect(); + // trivial literals must not appear + assert!( + !names.contains(&"counter"), + "trivial counter should not be extracted" + ); + assert!( + !names.contains(&"flag"), + "trivial flag should not be extracted" + ); + } + + #[test] + fn depth2_nested_fn_not_extracted() { + let source = r#" +export function outer(): void { + function depth1(): void { + function depth2(): void {} + } +} +"#; + let result = parse(source); + let names: Vec<&str> = result + .metadata + .exports + .iter() + .map(|e| e.name.as_str()) + .collect(); + assert!(!names.contains(&"depth2"), "depth2 should not be extracted"); + assert!(names.contains(&"depth1"), "depth1 should be extracted"); + } + + #[test] + fn prologue_var_after_first_nested_fn_not_extracted() { + let source = r#" +export function outer(): void { + var before = createA(); + function inner(): void {} + var after = createB(); +} +"#; + let result = parse(source); + let names: Vec<&str> = result + .metadata + .exports + .iter() + .map(|e| e.name.as_str()) + .collect(); + assert!( + names.contains(&"before"), + "before (prologue) should be extracted" + ); + assert!( + !names.contains(&"after"), + "after (post-first-fn) should not be extracted" + ); + } + + #[test] + fn nested_symbols_in_non_exported_function() { + let source = r#" +function internalHelper(): void { + var state = createState(); + function processItem(item: any): void {} +} +"#; + let result = parse(source); + let names: Vec<&str> = result + .metadata + .exports + .iter() + .map(|e| e.name.as_str()) + .collect(); + assert!( + names.contains(&"processItem"), + "processItem should be extracted" + ); + assert!( + names.contains(&"state"), + "state closure-state should be extracted" + ); + } } diff --git a/src/parser/mod.rs b/src/parser/mod.rs index ec0b30d..6fc695e 100644 --- a/src/parser/mod.rs +++ b/src/parser/mod.rs @@ -32,6 +32,12 @@ pub struct ExportEntry { /// The method renders under 
`methods:` in the sidecar as `ClassName.method: [start, end]`. #[serde(skip_serializing_if = "Option::is_none")] pub parent_class: Option, + /// ALP-922: kind tag for nested symbols inside function bodies. + /// "nested-fn" = depth-1 function declaration inside a function body. + /// "closure-state" = depth-1 non-trivial var/const/let prologue declaration. + /// None = regular top-level export or class method (existing behavior). + #[serde(skip_serializing_if = "Option::is_none")] + pub kind: Option, } impl ExportEntry { @@ -41,6 +47,7 @@ impl ExportEntry { start_line, end_line, parent_class: None, + kind: None, } } @@ -51,6 +58,34 @@ impl ExportEntry { start_line, end_line, parent_class: Some(parent_class), + kind: None, + } + } + + /// Create a depth-1 nested function declaration inside a function body. + pub fn nested_fn(name: String, start_line: usize, end_line: usize, parent_fn: String) -> Self { + Self { + name, + start_line, + end_line, + parent_class: Some(parent_fn), + kind: Some("nested-fn".to_string()), + } + } + + /// Create a depth-1 non-trivial var/const/let prologue declaration inside a function body. + pub fn closure_state( + name: String, + start_line: usize, + end_line: usize, + parent_fn: String, + ) -> Self { + Self { + name, + start_line, + end_line, + parent_class: Some(parent_fn), + kind: Some("closure-state".to_string()), } } } diff --git a/src/search.rs b/src/search.rs index 9b40a85..098c51d 100644 --- a/src/search.rs +++ b/src/search.rs @@ -98,7 +98,42 @@ pub fn bare_search(manifest: &Manifest, term: &str, limit: Option) -> Bar seen_exports.insert(term.to_string()); } - // 2. Fuzzy export matches — scored by relevance, capped at limit + // 2. Exact match in method_index by full dotted name (e.g. "createTypeChecker.getIndexType"). 
+ let method_exact = manifest + .method_index + .get(term) + .map(|loc| (term.to_string(), loc.clone())); + if let Some((name, loc)) = method_exact { + if !seen_exports.contains(&name) { + export_hits.push(export_hit_from_location(&name, &loc)); + seen_exports.insert(name); + } + } + + // 2b. Fuzzy method_index search — dotted names that contain the term. + // This is how "silentNeverType" finds "createTypeChecker.silentNeverType". + let mut method_fuzzy: Vec<(u32, String, &ExportLocation)> = manifest + .method_index + .iter() + .filter(|(name, _)| !seen_exports.contains(name.as_str())) + .filter(|(name, _)| { + let name_lower = name.to_lowercase(); + name_lower.contains(&term_lower) + }) + .map(|(name, loc)| { + // Score against the last component (after the dot) for better relevance + let short = name.rfind('.').map(|p| &name[p + 1..]).unwrap_or(name); + (export_match_score(short, &term_lower), name.clone(), loc) + }) + .collect(); + method_fuzzy.sort_by(|(sa, na, _), (sb, nb, _)| sb.cmp(sa).then(na.cmp(nb))); + for (_, name, loc) in method_fuzzy.into_iter().take(cap) { + if seen_exports.insert(name.clone()) { + export_hits.push(export_hit_from_location(&name, loc)); + } + } + + // 3. Fuzzy export matches — scored by relevance, capped at limit let mut fuzzy: Vec<(u32, &str, &ExportLocation)> = manifest .export_locations .iter() @@ -114,6 +149,7 @@ pub fn bare_search(manifest: &Manifest, term: &str, limit: Option) -> Bar for (_, name, loc) in fuzzy.into_iter().take(cap) { export_hits.push(export_hit_from_location(name, loc)); + seen_exports.insert(name.to_string()); } // 3. 
File path matches diff --git a/tests/cli_integration.rs b/tests/cli_integration.rs index c083637..ceb23f5 100644 --- a/tests/cli_integration.rs +++ b/tests/cli_integration.rs @@ -106,7 +106,7 @@ fn generate_creates_db() { let tmp = setup_project(); let path = tmp.path().to_str().unwrap(); - fmm::cli::generate(&[path.to_string()], false, false).unwrap(); + fmm::cli::generate(&[path.to_string()], false, false, true).unwrap(); assert!(db_exists(tmp.path())); assert!(db_indexed(tmp.path(), "src/auth.ts")); @@ -119,7 +119,7 @@ fn generate_indexes_exports() { let tmp = setup_project(); let path = tmp.path().to_str().unwrap(); - fmm::cli::generate(&[path.to_string()], false, false).unwrap(); + fmm::cli::generate(&[path.to_string()], false, false, true).unwrap(); assert!(db_has_export(tmp.path(), "src/auth.ts", "validateUser")); assert!(db_has_export(tmp.path(), "src/auth.ts", "AuthService")); @@ -130,11 +130,11 @@ fn generate_skips_unchanged_files() { let tmp = setup_project(); let path = tmp.path().to_str().unwrap(); - fmm::cli::generate(&[path.to_string()], false, false).unwrap(); + fmm::cli::generate(&[path.to_string()], false, false, true).unwrap(); let count_before = db_export_count(tmp.path(), "src/auth.ts"); // Generate again — source unchanged, export count should be identical - fmm::cli::generate(&[path.to_string()], false, false).unwrap(); + fmm::cli::generate(&[path.to_string()], false, false, true).unwrap(); let count_after = db_export_count(tmp.path(), "src/auth.ts"); assert_eq!(count_before, count_after); @@ -145,7 +145,7 @@ fn generate_updates_stale_files() { let tmp = setup_project(); let path = tmp.path().to_str().unwrap(); - fmm::cli::generate(&[path.to_string()], false, false).unwrap(); + fmm::cli::generate(&[path.to_string()], false, false, true).unwrap(); // Modify source to add a new export let auth_path = tmp.path().join("src/auth.ts"); @@ -154,7 +154,7 @@ fn generate_updates_stale_files() { fs::write(&auth_path, content).unwrap(); // Generate again 
— should detect the change and update - fmm::cli::generate(&[path.to_string()], false, false).unwrap(); + fmm::cli::generate(&[path.to_string()], false, false, true).unwrap(); assert!(db_has_export(tmp.path(), "src/auth.ts", "NEW_EXPORT")); } @@ -164,7 +164,7 @@ fn generate_dry_run_creates_no_files() { let tmp = setup_project(); let path = tmp.path().to_str().unwrap(); - fmm::cli::generate(&[path.to_string()], true, false).unwrap(); + fmm::cli::generate(&[path.to_string()], true, false, true).unwrap(); assert!(!db_exists(tmp.path())); } @@ -174,7 +174,7 @@ fn generate_dry_run_preserves_stale_db() { let tmp = setup_project(); let path = tmp.path().to_str().unwrap(); - fmm::cli::generate(&[path.to_string()], false, false).unwrap(); + fmm::cli::generate(&[path.to_string()], false, false, true).unwrap(); // Modify source let auth_path = tmp.path().join("src/auth.ts"); @@ -182,7 +182,7 @@ fn generate_dry_run_preserves_stale_db() { content.push_str("\nexport const DRY_RUN_TEST = true;\n"); fs::write(&auth_path, content).unwrap(); - fmm::cli::generate(&[path.to_string()], true, false).unwrap(); + fmm::cli::generate(&[path.to_string()], true, false, true).unwrap(); // DB should NOT contain the new export (dry run) assert!(!db_has_export(tmp.path(), "src/auth.ts", "DRY_RUN_TEST")); @@ -193,7 +193,7 @@ fn validate_passes_after_generate() { let tmp = setup_project(); let path = tmp.path().to_str().unwrap(); - fmm::cli::generate(&[path.to_string()], false, false).unwrap(); + fmm::cli::generate(&[path.to_string()], false, false, true).unwrap(); let result = fmm::cli::validate(&[path.to_string()]); assert!(result.is_ok()); } @@ -203,7 +203,7 @@ fn validate_fails_after_source_change() { let tmp = setup_project(); let path = tmp.path().to_str().unwrap(); - fmm::cli::generate(&[path.to_string()], false, false).unwrap(); + fmm::cli::generate(&[path.to_string()], false, false, true).unwrap(); // Modify source to add a new export let auth_path = tmp.path().join("src/auth.ts"); @@ 
-220,7 +220,7 @@ fn clean_clears_db() { let tmp = setup_project(); let path = tmp.path().to_str().unwrap(); - fmm::cli::generate(&[path.to_string()], false, false).unwrap(); + fmm::cli::generate(&[path.to_string()], false, false, true).unwrap(); assert!(db_indexed(tmp.path(), "src/auth.ts")); fmm::cli::clean(&[path.to_string()], false, false).unwrap(); @@ -234,7 +234,7 @@ fn clean_dry_run_preserves_db() { let tmp = setup_project(); let path = tmp.path().to_str().unwrap(); - fmm::cli::generate(&[path.to_string()], false, false).unwrap(); + fmm::cli::generate(&[path.to_string()], false, false, true).unwrap(); fmm::cli::clean(&[path.to_string()], true, false).unwrap(); @@ -249,7 +249,7 @@ fn full_workflow_generate_validate_clean() { let path = tmp.path().to_str().unwrap(); // Generate - fmm::cli::generate(&[path.to_string()], false, false).unwrap(); + fmm::cli::generate(&[path.to_string()], false, false, true).unwrap(); assert!(db_indexed(tmp.path(), "src/auth.ts")); // Validate (should pass) @@ -262,7 +262,7 @@ fn full_workflow_generate_validate_clean() { "export function newConnect() {}\nexport const NEW_SIZE = 20;\n", ) .unwrap(); - fmm::cli::generate(&[path.to_string()], false, false).unwrap(); + fmm::cli::generate(&[path.to_string()], false, false, true).unwrap(); // Validate again (should pass after generate updates stale entry) fmm::cli::validate(&[path.to_string()]).unwrap(); @@ -287,7 +287,7 @@ fn respects_gitignore() { // Create .gitignore that ignores utils.py fs::write(tmp.path().join(".gitignore"), "src/utils.py\n").unwrap(); - fmm::cli::generate(&[path.to_string()], false, false).unwrap(); + fmm::cli::generate(&[path.to_string()], false, false, true).unwrap(); // TypeScript files should be indexed assert!(db_indexed(tmp.path(), "src/auth.ts")); @@ -304,7 +304,7 @@ fn respects_fmmignore() { // Create .fmmignore that ignores db.ts fs::write(tmp.path().join(".fmmignore"), "src/db.ts\n").unwrap(); - fmm::cli::generate(&[path.to_string()], false, 
false).unwrap(); + fmm::cli::generate(&[path.to_string()], false, false, true).unwrap(); assert!(db_indexed(tmp.path(), "src/auth.ts")); assert!(!db_indexed(tmp.path(), "src/db.ts")); @@ -319,7 +319,13 @@ fn single_file_generate() { // (no .git/.fmmrc.json in a temp dir, so root = src/). let src_dir = tmp.path().join("src"); - fmm::cli::generate(&[file_path.to_str().unwrap().to_string()], false, false).unwrap(); + fmm::cli::generate( + &[file_path.to_str().unwrap().to_string()], + false, + false, + true, + ) + .unwrap(); // DB is at src/.fmm.db; file path stored relative to src/ assert!(db_indexed(&src_dir, "auth.ts")); diff --git a/tests/cross_language_validation.rs b/tests/cross_language_validation.rs index f406633..9b0c075 100644 --- a/tests/cross_language_validation.rs +++ b/tests/cross_language_validation.rs @@ -857,9 +857,12 @@ function mergeConfig(base: Record, overrides: Record fmm::manifest::Manifest { - fmm::cli::generate(&[root.to_str().unwrap().to_string()], false, false) + fmm::cli::generate(&[root.to_str().unwrap().to_string()], false, false, true) .expect("generate failed"); fmm::manifest::Manifest::load(root).unwrap_or_default() } diff --git a/tests/glossary.rs b/tests/glossary.rs index 9da8527..a3666f2 100644 --- a/tests/glossary.rs +++ b/tests/glossary.rs @@ -54,7 +54,7 @@ fn setup_glossary_server() -> (tempfile::TempDir, fmm::mcp::McpServer) { "export function formatDate(): string { return ''; }\n", ); - fmm::cli::generate(&[root.to_str().unwrap().to_string()], false, false).unwrap(); + fmm::cli::generate(&[root.to_str().unwrap().to_string()], false, false, true).unwrap(); let server = fmm::mcp::McpServer::with_root(root.to_path_buf()); (tmp, server) } @@ -99,7 +99,7 @@ fn setup_glossary_server_with_tests() -> (tempfile::TempDir, fmm::mcp::McpServer "from ..src.agent import run_dispatch\n\ndef test_dispatch_happy_path():\n pass\n", ); - fmm::cli::generate(&[root.to_str().unwrap().to_string()], false, false).unwrap(); + 
fmm::cli::generate(&[root.to_str().unwrap().to_string()], false, false, true).unwrap(); let server = fmm::mcp::McpServer::with_root(root.to_path_buf()); (tmp, server) } @@ -387,7 +387,7 @@ fn glossary_default_limit_is_ten() { let export = format!("item{i}"); write_file(root, &filename, &format!("export const {export} = {i};\n")); } - fmm::cli::generate(&[root.to_str().unwrap().to_string()], false, false).unwrap(); + fmm::cli::generate(&[root.to_str().unwrap().to_string()], false, false, true).unwrap(); let server = fmm::mcp::McpServer::with_root(root.to_path_buf()); let text = call_tool_text(&server, "fmm_glossary", json!({"pattern": "item"})); // 11 matches, default limit 10 → truncation notice @@ -427,7 +427,7 @@ fn setup_method_glossary_server() -> (tempfile::TempDir, fmm::mcp::McpServer) { "import { Injector } from '../src/injector';\nexport function testLoadInstance() {}\n", ); - fmm::cli::generate(&[root.to_str().unwrap().to_string()], false, false).unwrap(); + fmm::cli::generate(&[root.to_str().unwrap().to_string()], false, false, true).unwrap(); let server = fmm::mcp::McpServer::with_root(root.to_path_buf()); (tmp, server) } @@ -572,7 +572,7 @@ fn setup_bare_fn_server() -> (tempfile::TempDir, fmm::mcp::McpServer) { "import { scheduleUpdate } from './scheduler';\n// never calls it\n", ); - fmm::cli::generate(&[root.to_str().unwrap().to_string()], false, false).unwrap(); + fmm::cli::generate(&[root.to_str().unwrap().to_string()], false, false, true).unwrap(); let server = fmm::mcp::McpServer::with_root(root.to_path_buf()); (tmp, server) } @@ -644,7 +644,7 @@ fn setup_workspace_server() -> (tempfile::TempDir, fmm::mcp::McpServer) { "import { otherFlag } from './ReactFeatureFlags';\n", ); - fmm::cli::generate(&[root.to_str().unwrap().to_string()], false, false).unwrap(); + fmm::cli::generate(&[root.to_str().unwrap().to_string()], false, false, true).unwrap(); let server = fmm::mcp::McpServer::with_root(root.to_path_buf()); (tmp, server) } @@ -716,7 +716,7 @@ fn 
setup_disclosure_server() -> (tempfile::TempDir, fmm::mcp::McpServer) { "import { otherThing } from './source';\n", ); - fmm::cli::generate(&[root.to_str().unwrap().to_string()], false, false).unwrap(); + fmm::cli::generate(&[root.to_str().unwrap().to_string()], false, false, true).unwrap(); let server = fmm::mcp::McpServer::with_root(root.to_path_buf()); (tmp, server) } diff --git a/tests/mcp_tools.rs b/tests/mcp_tools.rs index a3c56e2..5dbac6b 100644 --- a/tests/mcp_tools.rs +++ b/tests/mcp_tools.rs @@ -53,7 +53,7 @@ fn setup_mcp_server() -> (tempfile::TempDir, fmm::mcp::McpServer) { "import bcrypt from 'bcrypt';\n\nexport function hashPassword(pw: string) {\n return bcrypt.hash(pw, 10);\n}\n\nexport function verifyPassword(pw: string, hash: string) {\n return bcrypt.compare(pw, hash);\n}\n", ); - fmm::cli::generate(&[root.to_str().unwrap().to_string()], false, false).unwrap(); + fmm::cli::generate(&[root.to_str().unwrap().to_string()], false, false, true).unwrap(); let server = fmm::mcp::McpServer::with_root(root.to_path_buf()); (tmp, server) } @@ -950,7 +950,7 @@ fn setup_go_mcp_server() -> (tempfile::TempDir, fmm::mcp::McpServer) { "package handler\n\nimport \"net/http\"\n\ntype Handler struct{}\n\nfunc NewHandler() *Handler {\n\treturn &Handler{}\n}\n\nfunc (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) {}\n", ); - fmm::cli::generate(&[root.to_str().unwrap().to_string()], false, false).unwrap(); + fmm::cli::generate(&[root.to_str().unwrap().to_string()], false, false, true).unwrap(); let server = fmm::mcp::McpServer::with_root(root.to_path_buf()); (tmp, server) } @@ -1026,7 +1026,7 @@ fn setup_large_class_server() -> (tempfile::TempDir, fmm::mcp::McpServer) { write_file(root, "src/service.ts", &source); - fmm::cli::generate(&[root.to_str().unwrap().to_string()], false, false).unwrap(); + fmm::cli::generate(&[root.to_str().unwrap().to_string()], false, false, true).unwrap(); let server = fmm::mcp::McpServer::with_root(root.to_path_buf()); (tmp, 
server) } @@ -1130,7 +1130,7 @@ fn setup_collision_server() -> (tempfile::TempDir, fmm::mcp::McpServer) { "export function createSession() {}\n", ); - fmm::cli::generate(&[root.to_str().unwrap().to_string()], false, false).unwrap(); + fmm::cli::generate(&[root.to_str().unwrap().to_string()], false, false, true).unwrap(); let server = fmm::mcp::McpServer::with_root(root.to_path_buf()); (tmp, server) } diff --git a/tests/named_import_precision.rs b/tests/named_import_precision.rs index 07854bd..da0a1dd 100644 --- a/tests/named_import_precision.rs +++ b/tests/named_import_precision.rs @@ -71,7 +71,7 @@ fn setup_precision_server() -> (tempfile::TempDir, fmm::mcp::McpServer) { "import * as WL from './WorkLoop';\nexport function debugSchedule(root: any) { WL.scheduleUpdate(root); }\n", ).unwrap(); - fmm::cli::generate(&[root.to_str().unwrap().to_string()], false, false).unwrap(); + fmm::cli::generate(&[root.to_str().unwrap().to_string()], false, false, true).unwrap(); (tmp, fmm::mcp::McpServer::with_root(root.to_path_buf())) }