-
Notifications
You must be signed in to change notification settings - Fork 5
test(phase2): rigorous coverage for importDetails + 2-hop + watcher queue #53
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
7a9af7a
2f0b319
d42e173
18b1f01
305bb74
32ab51b
b899dcc
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,40 @@ | ||
| export interface AutoRefreshController { | ||
| /** | ||
| * Called when a file watcher detects a change. | ||
| * Returns true when an incremental refresh should run immediately. | ||
| */ | ||
| onFileChange: (isIndexing: boolean) => boolean; | ||
| /** | ||
| * Called after an indexing run completes. | ||
| * Returns true when a queued incremental refresh should run next. | ||
| */ | ||
| consumeQueuedRefresh: (indexStatus: 'ready' | 'error' | 'idle' | 'indexing') => boolean; | ||
| /** Clears any queued refresh. */ | ||
| reset: () => void; | ||
| } | ||
|
|
||
| export function createAutoRefreshController(): AutoRefreshController { | ||
| let queued = false; | ||
|
|
||
| return { | ||
| onFileChange: (isIndexing: boolean) => { | ||
| if (isIndexing) { | ||
| queued = true; | ||
| return false; | ||
| } | ||
| return true; | ||
| }, | ||
| consumeQueuedRefresh: (indexStatus) => { | ||
| if (indexStatus === 'indexing') { | ||
| // Defensive: if called while indexing, do not clear the queue. | ||
| return false; | ||
| } | ||
| const shouldRun = queued && indexStatus === 'ready'; | ||
| queued = false; | ||
| return shouldRun; | ||
| }, | ||
| reset: () => { | ||
| queued = false; | ||
| } | ||
| }; | ||
| } |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,31 @@ | ||
| import { describe, it, expect } from 'vitest'; | ||
| import { createAutoRefreshController } from '../src/core/auto-refresh.js'; | ||
|
|
||
| describe('AutoRefreshController', () => { | ||
| it('runs immediately when not indexing', () => { | ||
| const controller = createAutoRefreshController(); | ||
| expect(controller.onFileChange(false)).toBe(true); | ||
| }); | ||
|
|
||
| it('queues when indexing and runs after ready', () => { | ||
| const controller = createAutoRefreshController(); | ||
| expect(controller.onFileChange(true)).toBe(false); | ||
| expect(controller.consumeQueuedRefresh('indexing')).toBe(false); | ||
| expect(controller.consumeQueuedRefresh('ready')).toBe(true); | ||
|
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more.
This new test case is internally inconsistent and will fail every run: Useful? React with 👍 / 👎. |
||
| }); | ||
|
|
||
| it('does not run queued refresh if indexing failed', () => { | ||
| const controller = createAutoRefreshController(); | ||
| expect(controller.onFileChange(true)).toBe(false); | ||
| expect(controller.consumeQueuedRefresh('error')).toBe(false); | ||
| }); | ||
|
|
||
| it('coalesces multiple changes into one queued refresh', () => { | ||
| const controller = createAutoRefreshController(); | ||
| expect(controller.onFileChange(true)).toBe(false); | ||
| expect(controller.onFileChange(true)).toBe(false); | ||
| expect(controller.consumeQueuedRefresh('ready')).toBe(true); | ||
| expect(controller.consumeQueuedRefresh('ready')).toBe(false); | ||
| }); | ||
| }); | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,119 @@ | ||
| import { describe, it, expect, beforeEach, afterEach } from 'vitest'; | ||
| import { promises as fs } from 'fs'; | ||
| import path from 'path'; | ||
| import { CodebaseIndexer } from '../src/core/indexer.js'; | ||
| import { dispatchTool } from '../src/tools/index.js'; | ||
| import type { ToolContext } from '../src/tools/types.js'; | ||
| import { | ||
| CODEBASE_CONTEXT_DIRNAME, | ||
| INTELLIGENCE_FILENAME, | ||
| KEYWORD_INDEX_FILENAME, | ||
| VECTOR_DB_DIRNAME, | ||
| MEMORY_FILENAME, | ||
| RELATIONSHIPS_FILENAME | ||
| } from '../src/constants/codebase-context.js'; | ||
|
|
||
| describe('Impact candidates (2-hop)', () => { | ||
| let tempRoot: string | null = null; | ||
| const token = 'UNIQUETOKEN123'; | ||
|
|
||
| beforeEach(async () => { | ||
| // Keep test artifacts under CWD (mirrors other indexer tests and avoids OS tmp quirks) | ||
| tempRoot = await fs.mkdtemp(path.join(process.cwd(), '.tmp-impact-2hop-')); | ||
| const srcDir = path.join(tempRoot, 'src'); | ||
| await fs.mkdir(srcDir, { recursive: true }); | ||
| await fs.writeFile(path.join(tempRoot, 'package.json'), JSON.stringify({ name: 'impact-2hop' })); | ||
|
|
||
| await fs.writeFile( | ||
| path.join(srcDir, 'c.ts'), | ||
| `export function cFn() { return '${token}'; }\n` | ||
| ); | ||
| await fs.writeFile(path.join(srcDir, 'b.ts'), `import { cFn } from './c';\nexport const b = cFn();\n`); | ||
| await fs.writeFile(path.join(srcDir, 'a.ts'), `import { b } from './b';\nexport const a = b;\n`); | ||
|
|
||
| const indexer = new CodebaseIndexer({ | ||
| rootPath: tempRoot, | ||
| config: { skipEmbedding: true } | ||
| }); | ||
| await indexer.index(); | ||
| }); | ||
|
|
||
| afterEach(async () => { | ||
| if (tempRoot) { | ||
| await fs.rm(tempRoot, { recursive: true, force: true }); | ||
| tempRoot = null; | ||
| } | ||
| }); | ||
|
|
||
| it('includes hop 1 and hop 2 candidates in preflight impact.details', async () => { | ||
| if (!tempRoot) throw new Error('tempRoot not initialized'); | ||
|
|
||
| const rootPath = tempRoot; | ||
| const paths = { | ||
| baseDir: path.join(rootPath, CODEBASE_CONTEXT_DIRNAME), | ||
| memory: path.join(rootPath, CODEBASE_CONTEXT_DIRNAME, MEMORY_FILENAME), | ||
| intelligence: path.join(rootPath, CODEBASE_CONTEXT_DIRNAME, INTELLIGENCE_FILENAME), | ||
| keywordIndex: path.join(rootPath, CODEBASE_CONTEXT_DIRNAME, KEYWORD_INDEX_FILENAME), | ||
| vectorDb: path.join(rootPath, CODEBASE_CONTEXT_DIRNAME, VECTOR_DB_DIRNAME) | ||
| }; | ||
|
|
||
| const ctx: ToolContext = { | ||
| indexState: { status: 'ready' }, | ||
| paths, | ||
| rootPath, | ||
| performIndexing: () => {} | ||
| }; | ||
|
|
||
| const relationshipsPath = path.join(rootPath, CODEBASE_CONTEXT_DIRNAME, RELATIONSHIPS_FILENAME); | ||
| const relationshipsRaw = await fs.readFile(relationshipsPath, 'utf-8'); | ||
| const relationships = JSON.parse(relationshipsRaw) as { | ||
| graph?: { imports?: Record<string, string[]> }; | ||
| }; | ||
| const imports = relationships.graph?.imports ?? {}; | ||
| const hasInternalEdge = | ||
| (imports['src/b.ts'] ?? []).some((d) => d.endsWith('src/c.ts') || d === 'src/c.ts') && | ||
| (imports['src/a.ts'] ?? []).some((d) => d.endsWith('src/b.ts') || d === 'src/b.ts'); | ||
| if (!hasInternalEdge) { | ||
| throw new Error( | ||
| `Expected relationships graph to include src/a.ts -> src/b.ts and src/b.ts -> src/c.ts, got imports keys=${JSON.stringify( | ||
| Object.keys(imports) | ||
| )}` | ||
| ); | ||
| } | ||
|
|
||
| const resp = await dispatchTool( | ||
| 'search_codebase', | ||
| { query: token, intent: 'edit', includeSnippets: false, limit: 1 }, | ||
| ctx | ||
| ); | ||
|
|
||
| const text = resp.content?.[0]?.text ?? ''; | ||
| const parsed = JSON.parse(text) as { | ||
| status?: string; | ||
| results?: Array<{ file?: string }>; | ||
| preflight?: { impact?: { details?: Array<{ file: string; hop: 1 | 2 }> } }; | ||
| }; | ||
| const results = parsed.results ?? []; | ||
| if (!Array.isArray(results) || results.length === 0) { | ||
| throw new Error( | ||
| `Expected at least one search result for token, got status=${String(parsed.status)} results=${JSON.stringify( | ||
| results | ||
| )}` | ||
| ); | ||
| } | ||
| const details = parsed.preflight?.impact?.details ?? []; | ||
|
|
||
| const hasHop1 = details.some((d) => d.file.endsWith('src/b.ts') && d.hop === 1); | ||
| if (!hasHop1) { | ||
| throw new Error( | ||
| `Expected hop 1 candidate src/b.ts, got impact.details=${JSON.stringify(details)}` | ||
| ); | ||
| } | ||
| const hasHop2 = details.some((d) => d.file.endsWith('src/a.ts') && d.hop === 2); | ||
| if (!hasHop2) { | ||
| throw new Error( | ||
| `Expected hop 2 candidate src/a.ts, got impact.details=${JSON.stringify(details)}` | ||
| ); | ||
| } | ||
| }); | ||
| }); |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,28 @@ | ||
| import { describe, it, expect } from 'vitest'; | ||
| import path from 'path'; | ||
| import os from 'os'; | ||
| import { InternalFileGraph } from '../src/utils/usage-tracker.js'; | ||
|
|
||
| describe('InternalFileGraph serialization', () => { | ||
| it('round-trips importDetails and importedSymbols behavior', () => { | ||
| const rootPath = path.join(os.tmpdir(), `ifg-${Date.now()}`); | ||
| const graph = new InternalFileGraph(rootPath); | ||
|
|
||
| const exportedFile = path.join(rootPath, 'src', 'exported.ts'); | ||
| const importingFile = path.join(rootPath, 'src', 'importer.ts'); | ||
|
|
||
| graph.trackExports(exportedFile, [{ name: 'Foo', type: 'function' }]); | ||
| graph.trackImport(importingFile, exportedFile, 12, ['Foo']); | ||
|
|
||
| const json = graph.toJSON(); | ||
| expect(json.importDetails).toBeDefined(); | ||
|
|
||
| const restored = InternalFileGraph.fromJSON(json, rootPath); | ||
| const restoredJson = restored.toJSON(); | ||
| expect(restoredJson.importDetails).toEqual(json.importDetails); | ||
|
|
||
| const unused = restored.findUnusedExports(); | ||
| expect(unused.length).toBe(0); | ||
| }); | ||
| }); | ||
|
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Earlier concern: if calling `consumeQueuedRefresh` cleared the queue regardless of status, the later `consumeQueuedRefresh('ready')` assertion would return false (not true as expected). With the implementation shown here, the defensive `'indexing'` branch returns false *without* clearing the queue, so the back-to-back calls in this test pass. Still, the intended usage is to call `consumeQueuedRefresh` once, after an indexing run completes — verify the double call stays supported if the defensive branch is ever removed.