Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 12 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -571,15 +571,15 @@ To inspect your repository, `dub ai ask` can invoke a constrained shell tool lim
The assistant cannot execute arbitrary shell commands; requests outside this allow-list are rejected, and additional safety checks block destructive command patterns.

Provider/key selection:
- If `DUBSTACK_GEMINI_API_KEY` is set, DubStack uses direct Google provider access (`gemini-3-flash`).
- Otherwise, if `DUBSTACK_AI_GATEWAY_API_KEY` is set, DubStack uses Vercel AI Gateway (`google/gemini-3-flash`).
- If `DUBSTACK_GEMINI_API_KEY` is set, DubStack uses direct Google provider access with model from `DUBSTACK_GEMINI_MODEL` (default: `gemini-3-flash-preview`).
- Otherwise, if `DUBSTACK_AI_GATEWAY_API_KEY` is set, DubStack uses Vercel AI Gateway with model from `DUBSTACK_AI_GATEWAY_MODEL` (default: `google/gemini-3-flash`).
- If both are set, DubStack prefers `DUBSTACK_GEMINI_API_KEY`.

Thinking is enabled by default for Gemini 3 Flash.
Comment thread
dubscode marked this conversation as resolved.

### `dub ai env`

Write DubStack AI keys into your shell profile (macOS/Linux shells).
Write DubStack AI keys/models into your shell profile (macOS/Linux shells).

```bash
# write Gemini key
Expand All @@ -588,9 +588,18 @@ dub ai env --gemini-key "<your-key>"
# write Gateway key
dub ai env --gateway-key "<your-key>"

# write Gemini model override
dub ai env --gemini-model "gemini-2.5-pro-preview"

# write Gateway model override
dub ai env --gateway-model "google/gemini-2.5-pro"

# write both
dub ai env --gemini-key "<gemini-key>" --gateway-key "<gateway-key>"

# write key + model together
dub ai env --gemini-key "<gemini-key>" --gemini-model "gemini-3-flash-preview"

# target a specific profile file explicitly
dub ai env --gemini-key "<your-key>" --profile ~/.zshrc
```
Expand Down
71 changes: 69 additions & 2 deletions src/commands/ai-env.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -56,10 +56,77 @@ describe('configureAiEnv', () => {
);
});

it('throws when no key is provided', async () => {
// Model-only invocation: no API keys given, only the two model overrides.
// Expects both model env vars to be appended as shell exports while the
// pre-existing profile content ('# existing') is left in place.
it('writes model exports without keys', async () => {
const profile = path.join(tempDir, '.zshrc');
await fs.promises.writeFile(profile, '# existing\n');

const result = await configureAiEnv({
geminiModel: 'gemini-2.5-flash',
gatewayModel: 'google/gemini-3-flash',
profile,
});
const updated = await fs.promises.readFile(profile, 'utf8');

// Both variables are reported as updated, Gemini first (matches the
// processing order in configureAiEnv).
expect(result.updated).toEqual([
'DUBSTACK_GEMINI_MODEL',
'DUBSTACK_AI_GATEWAY_MODEL',
]);
expect(updated).toContain(
"export DUBSTACK_GEMINI_MODEL='gemini-2.5-flash'",
);
expect(updated).toContain(
"export DUBSTACK_AI_GATEWAY_MODEL='google/gemini-3-flash'",
);
});

// Mixed key + model invocation against a profile that already exports an
// old DUBSTACK_GEMINI_MODEL value. Verifies the existing export is replaced
// in place (upsert, not append) and the new key export is added.
it('updates key and model exports together', async () => {
const profile = path.join(tempDir, '.bashrc');
await fs.promises.writeFile(
profile,
"export DUBSTACK_GEMINI_MODEL='old'\n",
);

const result = await configureAiEnv({
geminiKey: 'new-gemini-key',
geminiModel: 'gemini-3-flash-preview',
profile,
});
const updated = await fs.promises.readFile(profile, 'utf8');

expect(result.updated).toEqual([
'DUBSTACK_GEMINI_API_KEY',
'DUBSTACK_GEMINI_MODEL',
]);
// Exactly one occurrence proves the old export was overwritten, not duplicated.
expect(updated.match(/DUBSTACK_GEMINI_MODEL/g)?.length).toBe(1);
expect(updated).toContain(
"export DUBSTACK_GEMINI_API_KEY='new-gemini-key'",
);
expect(updated).toContain(
"export DUBSTACK_GEMINI_MODEL='gemini-3-flash-preview'",
);
});

// A whitespace-only model value must be rejected (normalizeGeminiModel trims
// before validating, so ' ' counts as empty).
it('rejects empty model values', async () => {
const profile = path.join(tempDir, '.zshrc');
await expect(
configureAiEnv({ profile, geminiModel: ' ' }),
).rejects.toThrow('Gemini model cannot be empty');
});

// A provider-prefixed id ('google/…') is a gateway-style name; the direct
// Gemini model flag must reject it so users don't mix up the two formats.
it('rejects gateway-style Gemini model names', async () => {
const profile = path.join(tempDir, '.zshrc');
await expect(
configureAiEnv({
profile,
geminiModel: 'google/gemini-2.5-pro',
}),
).rejects.toThrow("Gemini model should not include '/'");
});

it('throws when no key or model is provided', async () => {
const profile = path.join(tempDir, '.zshrc');
await expect(configureAiEnv({ profile })).rejects.toThrow(
'Provide at least one key',
'Provide at least one key or model',
);
});
});
46 changes: 44 additions & 2 deletions src/commands/ai-env.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,14 @@ import { DubError } from '../lib/errors';

const GEMINI_KEY_NAME = 'DUBSTACK_GEMINI_API_KEY';
const GATEWAY_KEY_NAME = 'DUBSTACK_AI_GATEWAY_API_KEY';
const GEMINI_MODEL_NAME = 'DUBSTACK_GEMINI_MODEL';
const GATEWAY_MODEL_NAME = 'DUBSTACK_AI_GATEWAY_MODEL';

interface ConfigureAiEnvOptions {
geminiKey?: string;
gatewayKey?: string;
geminiModel?: string;
gatewayModel?: string;
shell?: string;
profile?: string;
}
Expand All @@ -21,9 +25,14 @@ interface ConfigureAiEnvResult {
export async function configureAiEnv(
options: ConfigureAiEnvOptions,
): Promise<ConfigureAiEnvResult> {
if (!options.geminiKey && !options.gatewayKey) {
if (
!options.geminiKey &&
!options.gatewayKey &&
!options.geminiModel &&
!options.gatewayModel
) {
throw new DubError(
'Provide at least one key via --gemini-key or --gateway-key.',
'Provide at least one key or model via --gemini-key, --gateway-key, --gemini-model, or --gateway-model.',
);
}

Expand All @@ -48,6 +57,18 @@ export async function configureAiEnv(
updated.push(GATEWAY_KEY_NAME);
}

if (options.geminiModel !== undefined) {
const model = normalizeGeminiModel(options.geminiModel);
content = upsertExport(content, GEMINI_MODEL_NAME, model);
updated.push(GEMINI_MODEL_NAME);
}

if (options.gatewayModel !== undefined) {
const model = normalizeGatewayModel(options.gatewayModel);
content = upsertExport(content, GATEWAY_MODEL_NAME, model);
updated.push(GATEWAY_MODEL_NAME);
}

if (!content.endsWith('\n')) {
content += '\n';
}
Expand Down Expand Up @@ -97,3 +118,24 @@ function upsertExport(content: string, key: string, value: string): string {
/**
 * Wraps a value in single quotes for safe embedding in a POSIX shell profile.
 *
 * Embedded single quotes are handled with the standard shell idiom: close the
 * quote, emit an escaped quote inside double quotes, then reopen — i.e.
 * `'` becomes `'"'"'`.
 *
 * @param value - Raw value to quote (key or model string).
 * @returns The single-quoted, shell-safe representation.
 */
function quoteForShell(value: string): string {
  const ESCAPED_SINGLE_QUOTE = `'"'"'`;
  const escaped = value.replaceAll("'", ESCAPED_SINGLE_QUOTE);
  return `'${escaped}'`;
}

/**
 * Validates and normalizes a model name for the direct Google Gemini provider.
 *
 * @param value - Raw model name from a CLI flag.
 * @returns The trimmed model name.
 * @throws DubError when the trimmed value is empty, or when it contains a
 *   '/' (which indicates a gateway-style `provider/model` id rather than a
 *   plain Gemini model name).
 */
function normalizeGeminiModel(value: string): string {
  const trimmed = value.trim();
  if (!trimmed) {
    throw new DubError('Gemini model cannot be empty.');
  }
  // A slash means the user supplied a gateway-style id; steer them to the
  // plain Gemini naming instead.
  if (trimmed.indexOf('/') !== -1) {
    throw new DubError(
      "Gemini model should not include '/'. Use names like 'gemini-3-flash-preview'.",
    );
  }
  return trimmed;
}

/**
 * Validates and normalizes a model id for the Vercel AI Gateway provider.
 *
 * Unlike {@link normalizeGeminiModel}, slashes are allowed here — gateway ids
 * are typically provider-prefixed (e.g. 'google/gemini-3-flash').
 *
 * @param value - Raw model id from a CLI flag.
 * @returns The trimmed model id.
 * @throws DubError when the trimmed value is empty.
 */
function normalizeGatewayModel(value: string): string {
  const id = value.trim();
  if (id.length > 0) {
    return id;
  }
  throw new DubError('Gateway model cannot be empty.');
}
62 changes: 61 additions & 1 deletion src/commands/ai.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,7 @@ describe('askAi', () => {
expect(createGoogleGenerativeAI).toHaveBeenCalledWith({
apiKey: 'gem-key',
});
expect(googleModel).toHaveBeenCalledWith('gemini-3-flash');
expect(googleModel).toHaveBeenCalledWith('gemini-3-flash-preview');
expect(createGateway).not.toHaveBeenCalled();
expect(createBashTool).toHaveBeenCalledWith(
expect.objectContaining({
Expand Down Expand Up @@ -151,6 +151,36 @@ describe('askAi', () => {
);
});

// With the Gemini API key set and DUBSTACK_GEMINI_MODEL present, askAi must
// instantiate the Google provider with the override model (not the default)
// and surface that id in result.modelId. The gateway key is deleted so the
// Gemini branch of resolveModel is the one exercised.
it('uses DUBSTACK_GEMINI_MODEL override when provided', async () => {
await writeConfig({ aiAssistantEnabled: true }, dir);
process.env.DUBSTACK_GEMINI_API_KEY = 'gem-key';
process.env.DUBSTACK_GEMINI_MODEL = 'gemini-2.5-pro-preview';
delete process.env.DUBSTACK_AI_GATEWAY_API_KEY;

// Stub the full dependency surface so no real provider/network is touched.
const streamText = vi.fn().mockReturnValue({
fullStream: streamFrom(['hello']),
});
const googleModel = vi.fn().mockReturnValue('google-model');
const createGoogleGenerativeAI = vi.fn().mockReturnValue(googleModel);
const createGateway = vi.fn();
const collectAiContext = vi.fn().mockResolvedValue(fakeContext);
const { createBashTool } = createBashToolMock();

const result = await askAi('Explain this stack', dir, {
output: createOutputCapture().stream,
deps: {
streamText,
createGoogleGenerativeAI,
createGateway,
collectAiContext,
createBashTool,
},
});

expect(googleModel).toHaveBeenCalledWith('gemini-2.5-pro-preview');
expect(result.modelId).toBe('gemini-2.5-pro-preview');
});

it('uses AI Gateway provider when only DUBSTACK_AI_GATEWAY_API_KEY is set', async () => {
await writeConfig({ aiAssistantEnabled: true }, dir);
delete process.env.DUBSTACK_GEMINI_API_KEY;
Expand Down Expand Up @@ -186,6 +216,36 @@ describe('askAi', () => {
expect(output.writes.join('')).toBe('gateway\n');
});

// Mirror of the Gemini override test for the gateway branch: with only the
// gateway key set plus DUBSTACK_AI_GATEWAY_MODEL, askAi must request the
// override id from the gateway provider and report it as result.modelId.
it('uses DUBSTACK_AI_GATEWAY_MODEL override when provided', async () => {
await writeConfig({ aiAssistantEnabled: true }, dir);
delete process.env.DUBSTACK_GEMINI_API_KEY;
process.env.DUBSTACK_AI_GATEWAY_API_KEY = 'gateway-key';
process.env.DUBSTACK_AI_GATEWAY_MODEL = 'google/gemini-2.5-pro';

// Stub the full dependency surface so no real provider/network is touched.
const streamText = vi.fn().mockReturnValue({
fullStream: streamFrom(['gateway']),
});
const createGoogleGenerativeAI = vi.fn();
const gatewayModel = vi.fn().mockReturnValue('gateway-model');
const createGateway = vi.fn().mockReturnValue(gatewayModel);
const collectAiContext = vi.fn().mockResolvedValue(fakeContext);
const { createBashTool } = createBashToolMock();

const result = await askAi('Explain this stack', dir, {
output: createOutputCapture().stream,
deps: {
streamText,
createGoogleGenerativeAI,
createGateway,
collectAiContext,
createBashTool,
},
});

expect(gatewayModel).toHaveBeenCalledWith('google/gemini-2.5-pro');
expect(result.modelId).toBe('google/gemini-2.5-pro');
});

it('streams a TTY thinking preview with spinner frames', async () => {
await writeConfig({ aiAssistantEnabled: true }, dir);
process.env.DUBSTACK_GEMINI_API_KEY = 'gem-key';
Expand Down
12 changes: 8 additions & 4 deletions src/commands/ai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -206,21 +206,25 @@ function resolveModel(deps: AskAiDependencies): {
} {
const geminiApiKey = process.env.DUBSTACK_GEMINI_API_KEY?.trim();
if (geminiApiKey) {
const geminiModel =
process.env.DUBSTACK_GEMINI_MODEL?.trim() || 'gemini-3-flash-preview';
const google = deps.createGoogleGenerativeAI({ apiKey: geminiApiKey });
return {
provider: 'google',
model: google('gemini-3-flash'),
modelId: 'gemini-3-flash',
model: google(geminiModel),
modelId: geminiModel,
};
}

const gatewayApiKey = process.env.DUBSTACK_AI_GATEWAY_API_KEY?.trim();
if (gatewayApiKey) {
const gatewayModel =
process.env.DUBSTACK_AI_GATEWAY_MODEL?.trim() || 'google/gemini-3-flash';
const gateway = deps.createGateway({ apiKey: gatewayApiKey });
return {
provider: 'gateway',
model: gateway('google/gemini-3-flash'),
modelId: 'google/gemini-3-flash',
model: gateway(gatewayModel),
modelId: gatewayModel,
};
}

Expand Down
64 changes: 64 additions & 0 deletions src/commands/create.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -219,6 +219,70 @@ describe('create with --ai', () => {
);
});

// create --ai path: with the Gemini key and DUBSTACK_GEMINI_MODEL set, the
// AI branch/message generation must request the override model from the
// Google provider. A staged file gives the command a diff to summarize.
it('uses DUBSTACK_GEMINI_MODEL override when provided', async () => {
await writeConfig({ aiAssistantEnabled: true }, dir);
process.env.DUBSTACK_GEMINI_API_KEY = 'gem-key';
process.env.DUBSTACK_GEMINI_MODEL = 'gemini-2.5-flash';
fs.writeFileSync(
path.join(dir, 'ai-model.ts'),
'export const aiModel = 1;\n',
);
await gitInRepo(dir, ['add', 'ai-model.ts']);

// generateText returns the JSON payload create expects (branch + message).
const generateText = vi.fn().mockResolvedValue({
text: '{"branch":"feat/ai-model-override","message":"feat: model override"}',
});
const googleModel = vi.fn().mockReturnValue('google-model');
const createGoogleGenerativeAI = vi.fn().mockReturnValue(googleModel);
const createGateway = vi.fn();

// name arg intentionally omitted (AI derives the branch name).
await create(
undefined as unknown as string,
dir,
{ ai: true },
{
generateText,
createGoogleGenerativeAI,
createGateway,
},
);

expect(googleModel).toHaveBeenCalledWith('gemini-2.5-flash');
});

// create --ai gateway branch: with only the gateway key set plus
// DUBSTACK_AI_GATEWAY_MODEL, the command must use the gateway provider with
// the override id and never touch the direct Google provider.
it('uses DUBSTACK_AI_GATEWAY_MODEL override when provided', async () => {
await writeConfig({ aiAssistantEnabled: true }, dir);
delete process.env.DUBSTACK_GEMINI_API_KEY;
process.env.DUBSTACK_AI_GATEWAY_API_KEY = 'gateway-key';
process.env.DUBSTACK_AI_GATEWAY_MODEL = 'google/gemini-2.5-flash';
fs.writeFileSync(
path.join(dir, 'ai-gateway.ts'),
'export const viaGateway = 1;\n',
);
await gitInRepo(dir, ['add', 'ai-gateway.ts']);

// generateText returns the JSON payload create expects (branch + message).
const generateText = vi.fn().mockResolvedValue({
text: '{"branch":"feat/ai-gateway-model","message":"feat: gateway model override"}',
});
const createGoogleGenerativeAI = vi.fn();
const gatewayModel = vi.fn().mockReturnValue('gateway-model');
const createGateway = vi.fn().mockReturnValue(gatewayModel);

// name arg intentionally omitted (AI derives the branch name).
await create(
undefined as unknown as string,
dir,
{ ai: true },
{
generateText,
createGoogleGenerativeAI,
createGateway,
},
);

expect(createGoogleGenerativeAI).not.toHaveBeenCalled();
expect(gatewayModel).toHaveBeenCalledWith('google/gemini-2.5-flash');
});

it('requires ai assistant to be enabled in config', async () => {
process.env.DUBSTACK_GEMINI_API_KEY = 'gem-key';
fs.writeFileSync(path.join(dir, 'ai-off.ts'), 'export const off = true;\n');
Expand Down
Loading