Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
92 changes: 87 additions & 5 deletions src/commands/ai.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,20 +21,35 @@ afterEach(async () => {
});

// Convenience wrapper: turns plain text chunks into a fake `fullStream`
// of text-delta parts (see fullStreamFrom).
function streamFrom(chunks: string[]) {
  const parts = chunks.map((text) => ({ type: 'text-delta' as const, text }));
  return fullStreamFrom(parts);
}

function fullStreamFrom(
parts: Array<
| { type: 'text-delta'; text: string }
| { type: 'reasoning-start' }
| { type: 'reasoning-delta'; text: string }
| { type: 'reasoning-end' }
| { type: 'error'; error: unknown }
>,
) {
return {
async *[Symbol.asyncIterator]() {
for (const chunk of chunks) {
yield chunk;
for (const part of parts) {
yield part;
}
},
};
}

function createOutputCapture() {
function createOutputCapture(options: { isTTY?: boolean } = {}) {
const writes: string[] = [];
return {
writes,
stream: {
isTTY: options.isTTY ?? false,
write(value: string | Uint8Array) {
writes.push(typeof value === 'string' ? value : value.toString());
return true;
Expand Down Expand Up @@ -80,7 +95,7 @@ describe('askAi', () => {
process.env.DUBSTACK_AI_GATEWAY_API_KEY = 'gateway-key';

const streamText = vi.fn().mockReturnValue({
textStream: streamFrom(['hello']),
fullStream: streamFrom(['hello']),
});
const googleModel = vi.fn().mockReturnValue('google-model');
const createGoogleGenerativeAI = vi.fn().mockReturnValue(googleModel);
Expand Down Expand Up @@ -126,6 +141,7 @@ describe('askAi', () => {
google: {
thinkingConfig: {
thinkingLevel: 'high',
includeThoughts: true,
},
},
},
Expand All @@ -139,7 +155,7 @@ describe('askAi', () => {
process.env.DUBSTACK_AI_GATEWAY_API_KEY = 'gateway-key';

const streamText = vi.fn().mockReturnValue({
textStream: streamFrom(['gateway']),
fullStream: streamFrom(['gateway']),
});
const createGoogleGenerativeAI = vi.fn();
const gatewayModel = vi.fn().mockReturnValue('gateway-model');
Expand Down Expand Up @@ -168,6 +184,72 @@ describe('askAi', () => {
expect(output.writes.join('')).toBe('gateway\n');
});

it('streams a TTY thinking preview with spinner frames', async () => {
  // Force the direct-Gemini path so only createGoogleGenerativeAI is used.
  await writeConfig({ aiAssistantEnabled: true }, dir);
  process.env.DUBSTACK_GEMINI_API_KEY = 'gem-key';
  delete process.env.DUBSTACK_AI_GATEWAY_API_KEY;

  // Reasoning parts stream first, then the final answer text part.
  const streamText = vi.fn().mockReturnValue({
    fullStream: fullStreamFrom([
      { type: 'reasoning-start' },
      { type: 'reasoning-delta', text: 'Planning edits' },
      { type: 'reasoning-delta', text: ' and checks' },
      { type: 'reasoning-end' },
      { type: 'text-delta', text: 'Done.' },
    ]),
  });
  const googleModel = vi.fn().mockReturnValue('google-model');
  const createGoogleGenerativeAI = vi.fn().mockReturnValue(googleModel);
  const createGateway = vi.fn();
  const collectAiContext = vi.fn().mockResolvedValue(fakeContext);
  const { createBashTool } = createBashToolMock();
  // isTTY: true enables the in-place spinner rendering under test.
  const output = createOutputCapture({ isTTY: true });

  await askAi('Explain this stack', dir, {
    output: output.stream,
    deps: {
      streamText,
      createGoogleGenerativeAI,
      createGateway,
      collectAiContext,
      createBashTool,
    },
  });

  const rendered = output.writes.join('');
  // The spinner line is drawn in place via carriage returns…
  expect(rendered).toContain('thinking:');
  expect(rendered).toContain('\r');
  // …and the real answer is the final, newline-terminated output.
  expect(rendered.endsWith('Done.\n')).toBe(true);
});

it('throws when the stream emits an error part', async () => {
  // Force the direct-Gemini path so only createGoogleGenerativeAI is used.
  await writeConfig({ aiAssistantEnabled: true }, dir);
  process.env.DUBSTACK_GEMINI_API_KEY = 'gem-key';
  delete process.env.DUBSTACK_AI_GATEWAY_API_KEY;

  // A stream whose only part is an error; askAi must surface it as a throw.
  const streamText = vi.fn().mockReturnValue({
    fullStream: fullStreamFrom([{ type: 'error', error: new Error('boom') }]),
  });
  const googleModel = vi.fn().mockReturnValue('google-model');
  const createGoogleGenerativeAI = vi.fn().mockReturnValue(googleModel);
  const createGateway = vi.fn();
  const collectAiContext = vi.fn().mockResolvedValue(fakeContext);
  const { createBashTool } = createBashToolMock();

  // The original Error instance should propagate (message preserved).
  await expect(
    askAi('Explain this stack', dir, {
      output: createOutputCapture().stream,
      deps: {
        streamText,
        createGoogleGenerativeAI,
        createGateway,
        collectAiContext,
        createBashTool,
      },
    }),
  ).rejects.toThrow('boom');
});

it('requires at least one AI key environment variable', async () => {
await writeConfig({ aiAssistantEnabled: true }, dir);
delete process.env.DUBSTACK_GEMINI_API_KEY;
Expand Down
100 changes: 97 additions & 3 deletions src/commands/ai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ import { DubError } from '../lib/errors';

/**
 * Minimal writable-stream shape consumed by askAi: a write sink plus the
 * optional TTY flag (mirrors Node's stream `isTTY`) that gates the
 * in-place thinking preview in createThinkingRenderer.
 */
interface WritableLike {
  write: (chunk: string | Uint8Array) => unknown;
  isTTY?: boolean;
}

interface AskAiDependencies {
Expand Down Expand Up @@ -45,10 +46,13 @@ const THINKING_PROVIDER_OPTIONS = {
google: {
thinkingConfig: {
thinkingLevel: 'high' as const,
includeThoughts: true,
},
},
};

// ASCII spinner frames for the TTY "thinking" preview; render() advances
// one frame per call.
const SPINNER_FRAMES = ['-', '\\', '|', '/'] as const;

export async function askAi(
prompt: string,
cwd: string,
Expand Down Expand Up @@ -89,11 +93,39 @@ export async function askAi(
providerOptions: THINKING_PROVIDER_OPTIONS,
});

const thinkingRenderer = createThinkingRenderer(output);
let wroteOutput = false;
for await (const part of result.textStream) {
output.write(part);
wroteOutput = true;
for await (const part of result.fullStream) {
switch (part.type) {
case 'reasoning-start': {
thinkingRenderer.start();
break;
}
case 'reasoning-delta': {
thinkingRenderer.update(part.text);
break;
}
case 'reasoning-end': {
thinkingRenderer.stop();
break;
}
case 'text-delta': {
thinkingRenderer.pauseForText();
output.write(part.text);
wroteOutput = true;
break;
}
case 'error': {
throw part.error instanceof Error
? part.error
: new DubError('AI assistant stream failed unexpectedly.');
}
default: {
break;
}
}
}
thinkingRenderer.stop();

if (wroteOutput) {
output.write('\n');
Expand Down Expand Up @@ -134,3 +166,65 @@ function resolveModel(deps: AskAiDependencies): {
"AI assistant requires DUBSTACK_GEMINI_API_KEY or DUBSTACK_AI_GATEWAY_API_KEY. Run 'dub ai env --gemini-key <key>' or 'dub ai env --gateway-key <key>'.",
);
}

/**
 * Creates a renderer for a transient, single-line "thinking" preview
 * (spinner frame + a snippet of the model's reasoning) drawn in place on
 * TTY outputs. On non-TTY streams every method is a no-op so piped or
 * captured output never contains spinner control characters.
 */
function createThinkingRenderer(output: WritableLike): {
  start: () => void;
  update: (delta: string) => void;
  pauseForText: () => void;
  stop: () => void;
} {
  if (!output.isTTY) {
    return {
      start() {},
      update() {},
      pauseForText() {},
      stop() {},
    };
  }

  let spinnerIndex = 0;
  let preview = '';
  let lineLength = 0;
  let active = false;
  let hasRendered = false;
  // Once real answer text has been written we must never redraw with `\r`
  // (it would overwrite the answer), so the preview is permanently
  // suppressed after pauseForText().
  let suppressed = false;

  // Overwrite the currently rendered preview with spaces and return the
  // cursor to column 0; no-op when nothing is on screen.
  const clearLine = () => {
    if (!hasRendered) return;
    output.write(`\r${' '.repeat(lineLength)}\r`);
    lineLength = 0;
    hasRendered = false;
  };

  // Draw spinner + reasoning snippet in place, advancing one spinner frame
  // per call. The snippet is capped at 96 chars to keep the line short.
  const render = () => {
    const frame = SPINNER_FRAMES[spinnerIndex];
    spinnerIndex = (spinnerIndex + 1) % SPINNER_FRAMES.length;

    const summary =
      preview.length > 96 ? `${preview.slice(0, 93)}...` : preview;
    const line = `${frame} thinking: ${summary || 'working...'}`;
    output.write(`\r${line}`);
    lineLength = line.length;
    hasRendered = true;
  };

  return {
    start() {
      if (active || suppressed) return;
      active = true;
      render();
    },
    update(delta: string) {
      if (!active) return;
      // Reasoning deltas may contain newlines or carriage returns, which
      // would break the single-line in-place redraw and the space-based
      // clearing; collapse all whitespace runs to a single space.
      preview += delta.replace(/\s+/g, ' ');
      render();
    },
    pauseForText() {
      // Real output is starting: erase the preview and stop rendering for
      // good so later reasoning parts cannot clobber the answer text.
      clearLine();
      active = false;
      suppressed = true;
    },
    stop() {
      active = false;
      preview = '';
      clearLine();
    },
  };
}
5 changes: 4 additions & 1 deletion src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@ import { DubError } from './lib/errors';
import { getCurrentBranch } from './lib/git';
import {
appendHistoryEntry,
normalizeHistoryLine,
redactSensitiveText,
sanitizeCommandArgs,
} from './lib/history';
Expand Down Expand Up @@ -1146,8 +1147,10 @@ function beginHistoryCapture(): void {
const originalStderrWrite = process.stderr.write.bind(process.stderr);

const captureLine = (line: string) => {
const normalized = normalizeHistoryLine(line);
if (normalized.length === 0) return;
if (output.length >= MAX_HISTORY_OUTPUT_LINES) return;
output.push(truncateHistoryLine(redactSensitiveText(line)));
output.push(truncateHistoryLine(redactSensitiveText(normalized)));
};

const captureChunk = (
Expand Down
15 changes: 15 additions & 0 deletions src/lib/history.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import { afterEach, beforeEach, describe, expect, it } from 'vitest';
import { createTestRepo } from '../../test/helpers';
import {
appendHistoryEntry,
normalizeHistoryLine,
readHistory,
redactSensitiveText,
sanitizeCommandArgs,
Expand Down Expand Up @@ -76,4 +77,18 @@ describe('history', () => {
expect(redacted).not.toContain('super-secret-token');
expect(redacted).toContain('[REDACTED]');
});

// Spinner output uses `\r` to redraw in place; only the text after the
// final carriage return is what the user actually saw.
it('normalizes carriage-return spinner lines to the final visible content', () => {
  const raw = '- thinking\r\\ thinking\rfinal output';
  expect(normalizeHistoryLine(raw)).toBe('final output');
});

it('returns empty string when normalized content is whitespace only', () => {
  const blankAfterCr = 'progress\r \t ';
  expect(normalizeHistoryLine(blankAfterCr)).toBe('');
});

it('keeps regular lines without carriage returns unchanged', () => {
  const plain = 'plain output line';
  expect(normalizeHistoryLine(plain)).toBe(plain);
});
});
5 changes: 5 additions & 0 deletions src/lib/history.ts
Original file line number Diff line number Diff line change
Expand Up @@ -138,3 +138,8 @@ export function redactSensitiveText(value: string): string {

return redacted;
}

/**
 * Reduces a terminal line that may contain in-place redraws to what the
 * user actually saw: only the text after the last carriage return counts.
 * Returns '' when that visible remainder is blank (whitespace only), so
 * pure spinner/progress lines are dropped from history.
 */
export function normalizeHistoryLine(line: string): string {
  const lastCr = line.lastIndexOf('\r');
  const visible = lastCr === -1 ? line : line.slice(lastCr + 1);
  return visible.trim().length === 0 ? '' : visible;
}