diff --git a/src/commands/ai.test.ts b/src/commands/ai.test.ts index 9cfe02b..fd18a83 100644 --- a/src/commands/ai.test.ts +++ b/src/commands/ai.test.ts @@ -21,20 +21,35 @@ afterEach(async () => { }); function streamFrom(chunks: string[]) { + return fullStreamFrom( + chunks.map((chunk) => ({ type: 'text-delta' as const, text: chunk })), + ); +} + +function fullStreamFrom( + parts: Array< + | { type: 'text-delta'; text: string } + | { type: 'reasoning-start' } + | { type: 'reasoning-delta'; text: string } + | { type: 'reasoning-end' } + | { type: 'error'; error: unknown } + >, +) { return { async *[Symbol.asyncIterator]() { - for (const chunk of chunks) { - yield chunk; + for (const part of parts) { + yield part; } }, }; } -function createOutputCapture() { +function createOutputCapture(options: { isTTY?: boolean } = {}) { const writes: string[] = []; return { writes, stream: { + isTTY: options.isTTY ?? false, write(value: string | Uint8Array) { writes.push(typeof value === 'string' ? value : value.toString()); return true; @@ -80,7 +95,7 @@ describe('askAi', () => { process.env.DUBSTACK_AI_GATEWAY_API_KEY = 'gateway-key'; const streamText = vi.fn().mockReturnValue({ - textStream: streamFrom(['hello']), + fullStream: streamFrom(['hello']), }); const googleModel = vi.fn().mockReturnValue('google-model'); const createGoogleGenerativeAI = vi.fn().mockReturnValue(googleModel); @@ -126,6 +141,7 @@ describe('askAi', () => { google: { thinkingConfig: { thinkingLevel: 'high', + includeThoughts: true, }, }, }, @@ -139,7 +155,7 @@ describe('askAi', () => { process.env.DUBSTACK_AI_GATEWAY_API_KEY = 'gateway-key'; const streamText = vi.fn().mockReturnValue({ - textStream: streamFrom(['gateway']), + fullStream: streamFrom(['gateway']), }); const createGoogleGenerativeAI = vi.fn(); const gatewayModel = vi.fn().mockReturnValue('gateway-model'); @@ -168,6 +184,72 @@ describe('askAi', () => { expect(output.writes.join('')).toBe('gateway\n'); }); + it('streams a TTY thinking preview 
with spinner frames', async () => { + await writeConfig({ aiAssistantEnabled: true }, dir); + process.env.DUBSTACK_GEMINI_API_KEY = 'gem-key'; + delete process.env.DUBSTACK_AI_GATEWAY_API_KEY; + + const streamText = vi.fn().mockReturnValue({ + fullStream: fullStreamFrom([ + { type: 'reasoning-start' }, + { type: 'reasoning-delta', text: 'Planning edits' }, + { type: 'reasoning-delta', text: ' and checks' }, + { type: 'reasoning-end' }, + { type: 'text-delta', text: 'Done.' }, + ]), + }); + const googleModel = vi.fn().mockReturnValue('google-model'); + const createGoogleGenerativeAI = vi.fn().mockReturnValue(googleModel); + const createGateway = vi.fn(); + const collectAiContext = vi.fn().mockResolvedValue(fakeContext); + const { createBashTool } = createBashToolMock(); + const output = createOutputCapture({ isTTY: true }); + + await askAi('Explain this stack', dir, { + output: output.stream, + deps: { + streamText, + createGoogleGenerativeAI, + createGateway, + collectAiContext, + createBashTool, + }, + }); + + const rendered = output.writes.join(''); + expect(rendered).toContain('thinking:'); + expect(rendered).toContain('\r'); + expect(rendered.endsWith('Done.\n')).toBe(true); + }); + + it('throws when the stream emits an error part', async () => { + await writeConfig({ aiAssistantEnabled: true }, dir); + process.env.DUBSTACK_GEMINI_API_KEY = 'gem-key'; + delete process.env.DUBSTACK_AI_GATEWAY_API_KEY; + + const streamText = vi.fn().mockReturnValue({ + fullStream: fullStreamFrom([{ type: 'error', error: new Error('boom') }]), + }); + const googleModel = vi.fn().mockReturnValue('google-model'); + const createGoogleGenerativeAI = vi.fn().mockReturnValue(googleModel); + const createGateway = vi.fn(); + const collectAiContext = vi.fn().mockResolvedValue(fakeContext); + const { createBashTool } = createBashToolMock(); + + await expect( + askAi('Explain this stack', dir, { + output: createOutputCapture().stream, + deps: { + streamText, + createGoogleGenerativeAI, + 
createGateway, + collectAiContext, + createBashTool, + }, + }), + ).rejects.toThrow('boom'); + }); + it('requires at least one AI key environment variable', async () => { await writeConfig({ aiAssistantEnabled: true }, dir); delete process.env.DUBSTACK_GEMINI_API_KEY; diff --git a/src/commands/ai.ts b/src/commands/ai.ts index fa2fdbb..d77331c 100644 --- a/src/commands/ai.ts +++ b/src/commands/ai.ts @@ -13,6 +13,7 @@ import { DubError } from '../lib/errors'; interface WritableLike { write: (chunk: string | Uint8Array) => unknown; + isTTY?: boolean; } interface AskAiDependencies { @@ -45,10 +46,13 @@ const THINKING_PROVIDER_OPTIONS = { google: { thinkingConfig: { thinkingLevel: 'high' as const, + includeThoughts: true, }, }, }; + +const SPINNER_FRAMES = ['-', '\\', '|', '/'] as const; + export async function askAi( prompt: string, cwd: string, @@ -89,11 +93,39 @@ export async function askAi( providerOptions: THINKING_PROVIDER_OPTIONS, }); + const thinkingRenderer = createThinkingRenderer(output); let wroteOutput = false; - for await (const part of result.textStream) { - output.write(part); - wroteOutput = true; + for await (const part of result.fullStream) { + switch (part.type) { + case 'reasoning-start': { + thinkingRenderer.start(); + break; + } + case 'reasoning-delta': { + thinkingRenderer.update(part.text); + break; + } + case 'reasoning-end': { + thinkingRenderer.stop(); + break; + } + case 'text-delta': { + thinkingRenderer.pauseForText(); + output.write(part.text); + wroteOutput = true; + break; + } + case 'error': { + thinkingRenderer.stop(); throw part.error instanceof Error + ? part.error + : new DubError('AI assistant stream failed unexpectedly.'); + } + default: { + break; + } + } } + thinkingRenderer.stop(); if (wroteOutput) { output.write('\n'); @@ -134,3 +166,65 @@ function resolveModel(deps: AskAiDependencies): { "AI assistant requires DUBSTACK_GEMINI_API_KEY or DUBSTACK_AI_GATEWAY_API_KEY. 
Run 'dub ai env --gemini-key ' or 'dub ai env --gateway-key '.", ); } + +function createThinkingRenderer(output: WritableLike): { + start: () => void; + update: (delta: string) => void; + pauseForText: () => void; + stop: () => void; +} { + if (!output.isTTY) { + return { + start() {}, + update() {}, + pauseForText() {}, + stop() {}, + }; + } + + let spinnerIndex = 0; + let preview = ''; + let lineLength = 0; + let active = false; + let hasRendered = false; + + const clearLine = () => { + if (!hasRendered) return; + output.write(`\r${' '.repeat(lineLength)}\r`); + lineLength = 0; + hasRendered = false; + }; + + const render = () => { + const frame = SPINNER_FRAMES[spinnerIndex]; + spinnerIndex = (spinnerIndex + 1) % SPINNER_FRAMES.length; + + const summary = + preview.length > 96 ? `${preview.slice(0, 93)}...` : preview; + const line = `${frame} thinking: ${summary || 'working...'}`; + output.write(`\r${line}`); + lineLength = line.length; + hasRendered = true; + }; + + return { + start() { + if (active) return; + active = true; + render(); + }, + update(delta: string) { + if (!active) return; + preview += delta; + render(); + }, + pauseForText() { + clearLine(); + }, + stop() { + active = false; + preview = ''; + clearLine(); + }, + }; +} diff --git a/src/index.ts b/src/index.ts index ef8768e..8945979 100644 --- a/src/index.ts +++ b/src/index.ts @@ -55,6 +55,7 @@ import { DubError } from './lib/errors'; import { getCurrentBranch } from './lib/git'; import { appendHistoryEntry, + normalizeHistoryLine, redactSensitiveText, sanitizeCommandArgs, } from './lib/history'; @@ -1146,8 +1147,10 @@ function beginHistoryCapture(): void { const originalStderrWrite = process.stderr.write.bind(process.stderr); const captureLine = (line: string) => { + const normalized = normalizeHistoryLine(line); + if (normalized.length === 0) return; if (output.length >= MAX_HISTORY_OUTPUT_LINES) return; - output.push(truncateHistoryLine(redactSensitiveText(line))); + 
output.push(truncateHistoryLine(redactSensitiveText(normalized))); }; const captureChunk = ( diff --git a/src/lib/history.test.ts b/src/lib/history.test.ts index a79391c..740ca6a 100644 --- a/src/lib/history.test.ts +++ b/src/lib/history.test.ts @@ -2,6 +2,7 @@ import { afterEach, beforeEach, describe, expect, it } from 'vitest'; import { createTestRepo } from '../../test/helpers'; import { appendHistoryEntry, + normalizeHistoryLine, readHistory, redactSensitiveText, sanitizeCommandArgs, @@ -76,4 +77,18 @@ describe('history', () => { expect(redacted).not.toContain('super-secret-token'); expect(redacted).toContain('[REDACTED]'); }); + + it('normalizes carriage-return spinner lines to the final visible content', () => { + expect(normalizeHistoryLine('- thinking\r\\ thinking\rfinal output')).toBe( + 'final output', + ); + }); + + it('returns empty string when normalized content is whitespace only', () => { + expect(normalizeHistoryLine('progress\r \t ')).toBe(''); + }); + + it('keeps regular lines without carriage returns unchanged', () => { + expect(normalizeHistoryLine('plain output line')).toBe('plain output line'); + }); }); diff --git a/src/lib/history.ts b/src/lib/history.ts index b542dd0..7cd8f29 100644 --- a/src/lib/history.ts +++ b/src/lib/history.ts @@ -138,3 +138,8 @@ export function redactSensitiveText(value: string): string { return redacted; } + +export function normalizeHistoryLine(line: string): string { + const visible = line.split('\r').pop() ?? ''; + return visible.trim().length === 0 ? '' : visible; +}