Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
38 changes: 36 additions & 2 deletions lib/agents.sh
Original file line number Diff line number Diff line change
Expand Up @@ -150,10 +150,12 @@ agent_add() {
echo " 1) Anthropic (Claude)"
echo " 2) OpenAI (Codex)"
echo " 3) OpenCode"
read -rp "Choose [1-3, default: 1]: " AGENT_PROVIDER_CHOICE
echo " 4) Gemini CLI"
read -rp "Choose [1-4, default: 1]: " AGENT_PROVIDER_CHOICE
case "$AGENT_PROVIDER_CHOICE" in
2) AGENT_PROVIDER="openai" ;;
3) AGENT_PROVIDER="opencode" ;;
4) AGENT_PROVIDER="gemini" ;;
*) AGENT_PROVIDER="anthropic" ;;
esac

Expand Down Expand Up @@ -191,6 +193,21 @@ agent_add() {
8) read -rp "Enter model name (e.g. provider/model): " AGENT_MODEL ;;
*) AGENT_MODEL="opencode/claude-sonnet-4-5" ;;
esac
elif [ "$AGENT_PROVIDER" = "gemini" ]; then
echo "Model:"
echo " 1) auto (recommended)"
echo " 2) pro (gemini-2.5-pro)"
echo " 3) flash (gemini-2.5-flash)"
echo " 4) flash-lite (gemini-2.5-flash-lite)"
echo " 5) Custom (enter model name)"
read -rp "Choose [1-5, default: 1]: " AGENT_MODEL_CHOICE
case "$AGENT_MODEL_CHOICE" in
2) AGENT_MODEL="pro" ;;
3) AGENT_MODEL="flash" ;;
4) AGENT_MODEL="flash-lite" ;;
5) read -rp "Enter model name: " AGENT_MODEL ;;
*) AGENT_MODEL="auto" ;;
esac
else
echo "Model:"
echo " 1) GPT-5.3 Codex"
Expand Down Expand Up @@ -517,15 +534,32 @@ agent_provider() {
echo "Use 'tinyclaw agent provider ${agent_id} openai --model {gpt-5.3-codex|gpt-5.2}' to also set the model."
fi
;;
gemini)
if [ -n "$model_arg" ]; then
jq --arg id "$agent_id" --arg model "$model_arg" \
'.agents[$id].provider = "gemini" | .agents[$id].model = $model' \
"$SETTINGS_FILE" > "$tmp_file" && mv "$tmp_file" "$SETTINGS_FILE"
echo -e "${GREEN}✓ Agent '${agent_id}' switched to Gemini with model: ${model_arg}${NC}"
else
jq --arg id "$agent_id" \
'.agents[$id].provider = "gemini"' \
"$SETTINGS_FILE" > "$tmp_file" && mv "$tmp_file" "$SETTINGS_FILE"
echo -e "${GREEN}✓ Agent '${agent_id}' switched to Gemini${NC}"
echo ""
echo "Use 'tinyclaw agent provider ${agent_id} gemini --model {auto|pro|flash|flash-lite}' to also set the model."
fi
;;
*)
echo "Usage: tinyclaw agent provider <agent_id> {anthropic|openai} [--model MODEL_NAME]"
echo "Usage: tinyclaw agent provider <agent_id> {anthropic|openai|gemini} [--model MODEL_NAME]"
echo ""
echo "Examples:"
echo " tinyclaw agent provider coder # Show current provider/model"
echo " tinyclaw agent provider coder anthropic # Switch to Anthropic"
echo " tinyclaw agent provider coder openai # Switch to OpenAI"
echo " tinyclaw agent provider coder gemini # Switch to Gemini"
echo " tinyclaw agent provider coder anthropic --model opus # Switch to Anthropic Opus"
echo " tinyclaw agent provider coder openai --model gpt-5.3-codex # Switch to OpenAI GPT-5.3 Codex"
echo " tinyclaw agent provider coder gemini --model flash # Switch to Gemini Flash"
exit 1
;;
esac
Expand Down
53 changes: 49 additions & 4 deletions lib/setup-wizard.sh
Original file line number Diff line number Diff line change
Expand Up @@ -102,13 +102,15 @@ echo ""
echo " 1) Anthropic (Claude) (recommended)"
echo " 2) OpenAI (Codex/GPT)"
echo " 3) OpenCode"
echo " 4) Gemini CLI"
echo ""
read -rp "Choose [1-3]: " PROVIDER_CHOICE
read -rp "Choose [1-4]: " PROVIDER_CHOICE

case "$PROVIDER_CHOICE" in
1) PROVIDER="anthropic" ;;
2) PROVIDER="openai" ;;
3) PROVIDER="opencode" ;;
4) PROVIDER="gemini" ;;
*)
echo -e "${RED}Invalid choice${NC}"
exit 1
Expand Down Expand Up @@ -176,6 +178,36 @@ elif [ "$PROVIDER" = "opencode" ]; then
esac
echo -e "${GREEN}✓ Model: $MODEL${NC}"
echo ""
elif [ "$PROVIDER" = "gemini" ]; then
echo "Which Gemini model?"
echo ""
echo " 1) auto (recommended)"
echo " 2) pro (gemini-2.5-pro)"
echo " 3) flash (gemini-2.5-flash)"
echo " 4) flash-lite (gemini-2.5-flash-lite)"
echo " 5) Custom (enter model name)"
echo ""
read -rp "Choose [1-5]: " MODEL_CHOICE

case "$MODEL_CHOICE" in
1) MODEL="auto" ;;
2) MODEL="pro" ;;
3) MODEL="flash" ;;
4) MODEL="flash-lite" ;;
5)
read -rp "Enter model name: " MODEL
if [ -z "$MODEL" ]; then
echo -e "${RED}Model name required${NC}"
exit 1
fi
;;
*)
echo -e "${RED}Invalid choice${NC}"
exit 1
;;
esac
echo -e "${GREEN}✓ Model: $MODEL${NC}"
echo ""
else
# OpenAI models
echo "Which OpenAI model?"
Expand Down Expand Up @@ -207,7 +239,7 @@ fi

# Heartbeat interval
echo "Heartbeat interval (seconds)?"
echo -e "${YELLOW}(How often Claude checks in proactively)${NC}"
echo -e "${YELLOW}(How often your default agent checks in proactively)${NC}"
echo ""
read -rp "Interval in seconds [default: 3600]: " HEARTBEAT_INPUT
HEARTBEAT_INTERVAL=${HEARTBEAT_INPUT:-3600}
Expand Down Expand Up @@ -288,11 +320,12 @@ if [[ "$SETUP_AGENTS" =~ ^[yY] ]]; then
read -rp " Display name: " NEW_AGENT_NAME
[ -z "$NEW_AGENT_NAME" ] && NEW_AGENT_NAME="$NEW_AGENT_ID"

echo " Provider: 1) Anthropic 2) OpenAI 3) OpenCode"
read -rp " Choose [1-3, default: 1]: " NEW_PROVIDER_CHOICE
echo " Provider: 1) Anthropic 2) OpenAI 3) OpenCode 4) Gemini"
read -rp " Choose [1-4, default: 1]: " NEW_PROVIDER_CHOICE
case "$NEW_PROVIDER_CHOICE" in
2) NEW_PROVIDER="openai" ;;
3) NEW_PROVIDER="opencode" ;;
4) NEW_PROVIDER="gemini" ;;
*) NEW_PROVIDER="anthropic" ;;
esac

Expand All @@ -314,6 +347,16 @@ if [[ "$SETUP_AGENTS" =~ ^[yY] ]]; then
5) read -rp " Enter model name (e.g. provider/model): " NEW_MODEL ;;
*) NEW_MODEL="opencode/claude-sonnet-4-5" ;;
esac
elif [ "$NEW_PROVIDER" = "gemini" ]; then
echo " Model: 1) auto 2) pro 3) flash 4) flash-lite 5) Custom"
read -rp " Choose [1-5, default: 1]: " NEW_MODEL_CHOICE
case "$NEW_MODEL_CHOICE" in
2) NEW_MODEL="pro" ;;
3) NEW_MODEL="flash" ;;
4) NEW_MODEL="flash-lite" ;;
5) read -rp " Enter model name: " NEW_MODEL ;;
*) NEW_MODEL="auto" ;;
esac
else
echo " Model: 1) GPT-5.3 Codex 2) GPT-5.2 3) Custom"
read -rp " Choose [1-3, default: 1]: " NEW_MODEL_CHOICE
Expand Down Expand Up @@ -357,6 +400,8 @@ if [ "$PROVIDER" = "anthropic" ]; then
MODELS_SECTION='"models": { "provider": "anthropic", "anthropic": { "model": "'"${MODEL}"'" } }'
elif [ "$PROVIDER" = "opencode" ]; then
MODELS_SECTION='"models": { "provider": "opencode", "opencode": { "model": "'"${MODEL}"'" } }'
elif [ "$PROVIDER" = "gemini" ]; then
MODELS_SECTION='"models": { "provider": "gemini", "gemini": { "model": "'"${MODEL}"'" } }'
else
MODELS_SECTION='"models": { "provider": "openai", "openai": { "model": "'"${MODEL}"'" } }'
fi
Expand Down
4 changes: 2 additions & 2 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

22 changes: 22 additions & 0 deletions src/lib/agent.ts
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,11 @@ export function ensureAgentDirectory(agentDir: string): void {
fs.copyFileSync(sourceAgents, path.join(agentDir, '.claude', 'CLAUDE.md'));
}

// Copy AGENTS.md as GEMINI.md for Gemini CLI context loading.
if (fs.existsSync(sourceAgents)) {
fs.copyFileSync(sourceAgents, path.join(agentDir, 'GEMINI.md'));
}

// Copy default skills from SCRIPT_DIR into .agents/skills
const sourceSkills = path.join(SCRIPT_DIR, '.agents', 'skills');
if (fs.existsSync(sourceSkills)) {
Expand Down Expand Up @@ -144,4 +149,21 @@ export function updateAgentTeammates(agentDir: string, agentId: string, agents:
claudeContent = claudeContent.trimEnd() + '\n\n' + startMarker + block + endMarker + '\n';
}
fs.writeFileSync(claudeMdPath, claudeContent);

// Also write to GEMINI.md for Gemini CLI.
const geminiMdPath = path.join(agentDir, 'GEMINI.md');
let geminiContent = '';
if (fs.existsSync(geminiMdPath)) {
geminiContent = fs.readFileSync(geminiMdPath, 'utf8');
} else if (fs.existsSync(agentsMdPath)) {
geminiContent = fs.readFileSync(agentsMdPath, 'utf8');
}
const gStartIdx = geminiContent.indexOf(startMarker);
const gEndIdx = geminiContent.indexOf(endMarker);
if (gStartIdx !== -1 && gEndIdx !== -1) {
geminiContent = geminiContent.substring(0, gStartIdx + startMarker.length) + block + geminiContent.substring(gEndIdx);
} else {
geminiContent = geminiContent.trimEnd() + '\n\n' + startMarker + block + endMarker + '\n';
}
fs.writeFileSync(geminiMdPath, geminiContent);
}
15 changes: 14 additions & 1 deletion src/lib/config.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import fs from 'fs';
import path from 'path';
import { jsonrepair } from 'jsonrepair';
import { Settings, AgentConfig, TeamConfig, CLAUDE_MODEL_IDS, CODEX_MODEL_IDS, OPENCODE_MODEL_IDS } from './types';
import { Settings, AgentConfig, TeamConfig, CLAUDE_MODEL_IDS, CODEX_MODEL_IDS, GEMINI_MODEL_IDS, OPENCODE_MODEL_IDS } from './types';

export const SCRIPT_DIR = path.resolve(__dirname, '../..');
const _localTinyclaw = path.join(SCRIPT_DIR, '.tinyclaw');
Expand Down Expand Up @@ -45,6 +45,9 @@ export function getSettings(): Settings {
if (settings?.models?.openai) {
if (!settings.models) settings.models = {};
settings.models.provider = 'openai';
} else if (settings?.models?.gemini) {
if (!settings.models) settings.models = {};
settings.models.provider = 'gemini';
} else if (settings?.models?.opencode) {
if (!settings.models) settings.models = {};
settings.models.provider = 'opencode';
Expand All @@ -69,6 +72,8 @@ export function getDefaultAgentFromModels(settings: Settings): AgentConfig {
let model = '';
if (provider === 'openai') {
model = settings?.models?.openai?.model || 'gpt-5.3-codex';
} else if (provider === 'gemini') {
model = settings?.models?.gemini?.model || 'auto';
} else if (provider === 'opencode') {
model = settings?.models?.opencode?.model || 'sonnet';
} else {
Expand Down Expand Up @@ -120,6 +125,14 @@ export function resolveCodexModel(model: string): string {
return CODEX_MODEL_IDS[model] || model || '';
}

/**
 * Resolve the model ID for Gemini CLI (passed via --model flag).
 *
 * Returns '' for the 'auto' sentinel so callers omit the --model flag
 * entirely and let the CLI pick a model itself. For any other value,
 * falls back to the raw model string from settings if no mapping is found.
 */
export function resolveGeminiModel(model: string): string {
  // 'auto' must be special-cased BEFORE the table lookup: even if the
  // table mapped 'auto' to '', the `|| model` fallback below would turn
  // the empty string back into the literal 'auto', and `--model auto`
  // would still be passed to the CLI.
  if (model === 'auto') return '';
  return GEMINI_MODEL_IDS[model] || model || '';
}

/**
* Resolve the model ID for OpenCode (passed via --model flag).
* Falls back to the raw model string from settings if no mapping is found.
Expand Down
61 changes: 59 additions & 2 deletions src/lib/invoke.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ import { spawn } from 'child_process';
import fs from 'fs';
import path from 'path';
import { AgentConfig, TeamConfig } from './types';
import { SCRIPT_DIR, resolveClaudeModel, resolveCodexModel, resolveOpenCodeModel } from './config';
import { SCRIPT_DIR, resolveClaudeModel, resolveCodexModel, resolveGeminiModel, resolveOpenCodeModel } from './config';
import { log } from './logging';
import { ensureAgentDirectory, updateAgentTeammates } from './agent';

Expand Down Expand Up @@ -48,7 +48,7 @@ export async function runCommand(command: string, args: string[], cwd?: string):
}

/**
* Invoke a single agent with a message. Contains all Claude/Codex invocation logic.
* Invoke a single agent with a message. Contains provider-specific invocation logic.
* Returns the raw response text.
*/
export async function invokeAgent(
Expand Down Expand Up @@ -156,6 +156,63 @@ export async function invokeAgent(
}

return response || 'Sorry, I could not generate a response from OpenCode.';
} else if (provider === 'gemini') {
// Gemini CLI — non-interactive mode via --prompt.
// Uses --output-format json to return a single JSON object with a "response" field.
// Uses --resume latest for session continuation and retries fresh if resume is unavailable.
const modelId = resolveGeminiModel(agent.model);
log('INFO', `Using Gemini CLI (agent: ${agentId}, model: ${modelId || 'auto'})`);

const continueConversation = !shouldReset;

if (shouldReset) {
log('INFO', `🔄 Resetting Gemini conversation for agent: ${agentId}`);
}

// Build argv for one non-interactive Gemini CLI run.
// --output-format json: the reply comes back as a single JSON object.
// --approval-mode yolo: auto-approve tool use (headless run, no TTY to confirm).
const buildGeminiArgs = (withResume: boolean) => {
  const args = ['--output-format', 'json', '--approval-mode', 'yolo'];
  // Omit --model both when unset and for the 'auto' sentinel: with no
  // flag the CLI chooses a model itself, whereas a literal `--model auto`
  // may be rejected as an unknown model name.
  if (modelId && modelId !== 'auto') {
    args.push('--model', modelId);
  }
  if (withResume) {
    args.push('--resume', 'latest');
  }
  args.push('--prompt', message);
  return args;
};

// Extract the assistant reply from the Gemini CLI's JSON output.
// Falls back to the raw (trimmed) text when the output is not valid JSON,
// and returns null when there is nothing usable in it.
const parseGeminiOutput = (output: string): string | null => {
  const text = output.trim();
  if (!text) return null;
  try {
    const parsed = JSON.parse(text);
    const reply = parsed.response;
    if (typeof reply === 'string' && reply.trim()) {
      return reply;
    }
    const errorMessage = parsed?.error?.message;
    if (errorMessage) {
      return `Gemini CLI error: ${errorMessage}`;
    }
  } catch {
    // Not JSON (or not a shape we could read) — surface the raw text.
    return text;
  }
  return null;
};

let geminiOutput: string;
try {
geminiOutput = await runCommand('gemini', buildGeminiArgs(continueConversation), workingDir);
} catch (err: any) {
const errMsg = String(err?.message || '');
const resumeUnavailable = /(error resuming session|no .*session|session not found)/i.test(errMsg);
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Resume-error regex may be too narrow

The pattern /(error resuming session|no .*session|session not found)/i is hardcoded against three specific Gemini CLI error phrases. If the actual CLI emits a differently-worded message (e.g., "Session does not exist", "Cannot resume session", or any localised variant), the retry-without-resume fallback will never trigger — the original error will propagate instead and the agent will appear broken on first use.

Consider expanding the regex to cover more variants or, better, also catching the exit code if the Gemini CLI uses a distinctive non-zero code for "no session" errors:

Suggested change
const resumeUnavailable = /(error resuming session|no .*session|session not found)/i.test(errMsg);
const resumeUnavailable = /(error resuming|no .*session|session not found|cannot resume|session does not exist)/i.test(errMsg);

if (continueConversation && resumeUnavailable) {
log('INFO', `No resumable Gemini session for agent ${agentId}, starting fresh`);
geminiOutput = await runCommand('gemini', buildGeminiArgs(false), workingDir);
} else {
throw err;
}
}

return parseGeminiOutput(geminiOutput) || 'Sorry, I could not generate a response from Gemini.';
} else {
// Default to Claude (Anthropic)
log('INFO', `Using Claude provider (agent: ${agentId})`);
Expand Down
19 changes: 17 additions & 2 deletions src/lib/types.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
export interface AgentConfig {
name: string;
provider: string; // 'anthropic', 'openai', or 'opencode'
provider: string; // 'anthropic', 'openai', 'opencode', or 'gemini'
model: string; // e.g. 'sonnet', 'opus', 'gpt-5.3-codex'
working_directory: string;
system_prompt?: string;
Expand Down Expand Up @@ -43,7 +43,7 @@ export interface Settings {
whatsapp?: {};
};
models?: {
provider?: string; // 'anthropic', 'openai', or 'opencode'
provider?: string; // 'anthropic', 'openai', 'opencode', or 'gemini'
anthropic?: {
model?: string;
};
Expand All @@ -53,6 +53,9 @@ export interface Settings {
opencode?: {
model?: string;
};
gemini?: {
model?: string;
};
};
agents?: Record<string, AgentConfig>;
teams?: Record<string, TeamConfig>;
Expand Down Expand Up @@ -119,6 +122,18 @@ export const CODEX_MODEL_IDS: Record<string, string> = {
'gpt-5.3-codex': 'gpt-5.3-codex',
};

// Gemini CLI model IDs. Falls back to the raw model string.
export const GEMINI_MODEL_IDS: Record<string, string> = {
'auto': 'auto',
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

'auto' model resolves to a truthy string, so --model auto is always passed

GEMINI_MODEL_IDS['auto'] is set to the non-empty string 'auto'. In invoke.ts, resolveGeminiModel('auto') therefore returns 'auto', which is truthy — so the guard if (modelId) passes and --model auto is always appended to the Gemini CLI args.

The intent of the auto option, as described in the setup wizard ("Let Gemini choose"), is to let the CLI pick a model automatically, which typically means not passing --model at all. If the Gemini CLI does not accept --model auto as a valid argument, every invocation using the default model setting will fail.

The flag should simply be omitted for 'auto'. Note that only remapping the table entry ('auto': '' instead of 'auto': 'auto') is NOT sufficient on its own: resolveGeminiModel returns `GEMINI_MODEL_IDS[model] || model || ''`, so an empty-string mapping is falsy and the `|| model` fallback resurrects the literal 'auto' — the CLI would still receive `--model auto`. The reliable fixes are the explicit guard in invoke.ts shown below, or special-casing 'auto' inside resolveGeminiModel so it returns '' before the table lookup.

Alternatively, add an explicit guard in invoke.ts:

if (modelId && modelId !== 'auto') {
    args.push('--model', modelId);
}

'pro': 'gemini-2.5-pro',
'flash': 'gemini-2.5-flash',
'flash-lite': 'gemini-2.5-flash-lite',
'gemini-2.5-pro': 'gemini-2.5-pro',
'gemini-2.5-flash': 'gemini-2.5-flash',
'gemini-2.5-flash-lite': 'gemini-2.5-flash-lite',
'gemini-3-pro-preview': 'gemini-3-pro-preview',
};

// OpenCode model IDs in provider/model format (passed via --model / -m flag).
// Falls back to the raw model string from settings if no mapping is found.
export const OPENCODE_MODEL_IDS: Record<string, string> = {
Expand Down
Loading