Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions src/app/api/media/jobs/plan/route.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { NextRequest } from 'next/server';
import { streamTextFromProvider } from '@/lib/text-generator';
import { resolveProvider } from '@/lib/provider-resolver';
import { resolveProvider, DEFAULT_MODEL_ID } from '@/lib/provider-resolver';
import fs from 'fs';
import type { PlanMediaJobRequest } from '@/types';

Expand Down Expand Up @@ -62,7 +62,7 @@ export async function POST(request: NextRequest) {
});
// Preserve 'env' semantics (see onboarding route for rationale)
const providerId = resolved.provider?.id || 'env';
const modelId = resolved.upstreamModel || resolved.model || session?.model || 'claude-sonnet-4-20250514';
const modelId = resolved.upstreamModel || resolved.model || session?.model || DEFAULT_MODEL_ID;

// Read document content
let docContent = body.docContent || '';
Expand Down
3 changes: 2 additions & 1 deletion src/app/api/providers/models/route.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import { NextResponse } from 'next/server';
import { getAllProviders, getDefaultProviderId, setDefaultProviderId, getProvider, getModelsForProvider, getSetting } from '@/lib/db';
import { getContextWindow } from '@/lib/model-context';
import { CLAUDE_MODELS } from '@/lib/model-ids';
import { getDefaultModelsForProvider, inferProtocolFromLegacy, findPresetForLegacy } from '@/lib/provider-catalog';
import type { Protocol } from '@/lib/provider-catalog';
import type { ErrorResponse, ProviderModelGroup } from '@/types';
Expand Down Expand Up @@ -73,7 +74,7 @@ export async function GET() {
const cw = getContextWindow(m.value);
return {
value: m.value,
label: m.displayName,
label: CLAUDE_MODELS[m.value as keyof typeof CLAUDE_MODELS]?.displayName ?? m.displayName,
description: m.description,
supportsEffort: m.supportsEffort,
supportedEffortLevels: m.supportedEffortLevels,
Expand Down
11 changes: 5 additions & 6 deletions src/app/api/skills/search/route.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import { NextResponse } from 'next/server';
import { getActiveProvider, getSetting } from '@/lib/db';
import { CLAUDE_MODELS } from '@/lib/provider-resolver';

interface SkillInfo {
name: string;
Expand All @@ -12,12 +13,10 @@ interface SearchRequest {
model?: string;
}

// Model alias -> full model ID
const MODEL_MAP: Record<string, string> = {
sonnet: 'claude-sonnet-4-20250514',
opus: 'claude-opus-4-20250514',
haiku: 'claude-haiku-4-20250414',
};
// Alias ('sonnet' / 'opus' / 'haiku') -> full model ID, derived from the
// central CLAUDE_MODELS table so this route cannot drift from it.
const MODEL_MAP: Record<string, string> = {};
for (const [alias, model] of Object.entries(CLAUDE_MODELS)) {
  MODEL_MAP[alias] = model.id;
}

interface ApiConfig {
supported: boolean;
Expand Down
2 changes: 1 addition & 1 deletion src/app/chat/page.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -473,7 +473,7 @@ export default function NewChatPage() {
try {
const statusData = JSON.parse(event.data);
if (statusData.session_id) {
setStatusText(`Connected (${statusData.model || 'claude'})`);
setStatusText(`Connected (${statusData.requested_model || statusData.model || 'claude'})`);
setTimeout(() => setStatusText(undefined), 2000);
} else if (statusData.notification) {
setStatusText(statusData.message || statusData.title || undefined);
Expand Down
4 changes: 2 additions & 2 deletions src/lib/checkin-processor.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
import fs from 'fs';
import path from 'path';
import { getSetting, getSession } from '@/lib/db';
import { resolveProvider } from '@/lib/provider-resolver';
import { resolveProvider, DEFAULT_MODEL_ID } from '@/lib/provider-resolver';
import { loadState, saveState, writeDailyMemory } from '@/lib/assistant-workspace';
import { getLocalDateString } from '@/lib/utils';
import { generateTextFromProvider } from '@/lib/text-generator';
Expand Down Expand Up @@ -70,7 +70,7 @@ export async function processCheckin(
sessionModel: session?.model || undefined,
});
const providerId = resolved.provider?.id || 'env';
const model = resolved.upstreamModel || resolved.model || getSetting('default_model') || 'claude-sonnet-4-20250514';
const model = resolved.upstreamModel || resolved.model || getSetting('default_model') || DEFAULT_MODEL_ID;

const dailyMemoryPrompt = `You maintain daily memory entries for an AI assistant. Given the user's daily check-in answers, generate a daily memory entry for ${today}.

Expand Down
12 changes: 6 additions & 6 deletions src/lib/model-context.ts
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
import { CLAUDE_MODELS } from './model-ids';

export const MODEL_CONTEXT_WINDOWS: Record<string, number> = {
'sonnet': 200000,
'opus': 200000,
'haiku': 200000,
'claude-sonnet-4-20250514': 200000,
'claude-opus-4-20250514': 200000,
'claude-haiku-4-5-20251001': 200000,
// Short aliases
...Object.fromEntries(Object.entries(CLAUDE_MODELS).map(([alias, m]) => [alias, m.contextWindow])),
// Full model IDs
...Object.fromEntries(Object.values(CLAUDE_MODELS).map(m => [m.id, m.contextWindow])),
};

export function getContextWindow(model: string): number | null {
Expand Down
17 changes: 17 additions & 0 deletions src/lib/model-ids.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
/**
 * Canonical Claude model definitions — the single source of truth for
 * model aliases, full model IDs, display names, and context windows.
 *
 * Deliberately free of server-only imports (no fs, no db) so both
 * server routes and client-side React hooks may import it.
 *
 * When Anthropic ships a new model generation, update the table below.
 * NOTE(review): 'haiku' uses a dated snapshot ID while 'sonnet'/'opus'
 * use version-less IDs — confirm this mix is intentional.
 */

export const CLAUDE_MODELS = {
  sonnet: {
    id: 'claude-sonnet-4-6',
    displayName: 'Sonnet 4.6',
    contextWindow: 200000,
  },
  opus: {
    id: 'claude-opus-4-6',
    displayName: 'Opus 4.6',
    contextWindow: 200000,
  },
  haiku: {
    id: 'claude-haiku-4-5-20251001',
    displayName: 'Haiku 4.5',
    contextWindow: 200000,
  },
} as const;

/** Last-resort fallback model ID when nothing else supplies one. */
export const DEFAULT_MODEL_ID = CLAUDE_MODELS.sonnet.id;
4 changes: 2 additions & 2 deletions src/lib/onboarding-processor.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
import fs from 'fs';
import path from 'path';
import { getSetting, getSession } from '@/lib/db';
import { resolveProvider } from '@/lib/provider-resolver';
import { resolveProvider, DEFAULT_MODEL_ID } from '@/lib/provider-resolver';
import { loadState, saveState, ensureDailyDir, generateRootDocs } from '@/lib/assistant-workspace';
import { getLocalDateString } from '@/lib/utils';
import { generateTextFromProvider } from '@/lib/text-generator';
Expand Down Expand Up @@ -74,7 +74,7 @@ export async function processOnboarding(
sessionModel: session?.model || undefined,
});
const providerId = resolved.provider?.id || 'env';
const model = resolved.upstreamModel || resolved.model || getSetting('default_model') || 'claude-sonnet-4-20250514';
const model = resolved.upstreamModel || resolved.model || getSetting('default_model') || DEFAULT_MODEL_ID;

const soulPrompt = `Based on the following user onboarding answers, generate a concise "soul.md" file that defines an AI assistant's personality, communication style, and behavioral rules. Write in second person ("You are..."). Keep it under 2000 characters. Use markdown headers and bullet points.\n\n${qaText}`;

Expand Down
16 changes: 10 additions & 6 deletions src/lib/provider-resolver.ts
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,10 @@ import {
getModelsForProvider,
} from './db';

// Canonical model definitions live in model-ids.ts (client-safe, no fs/db imports).
import { CLAUDE_MODELS, DEFAULT_MODEL_ID } from './model-ids';
export { CLAUDE_MODELS, DEFAULT_MODEL_ID };

// ── Resolution result ───────────────────────────────────────────

export interface ResolvedProvider {
Expand Down Expand Up @@ -289,7 +293,7 @@ export function toAiSdkConfig(
const catalogEntry = resolved.availableModels.find(m => m.modelId === modelOverride);
modelId = catalogEntry?.upstreamModelId || modelOverride;
} else {
modelId = resolved.upstreamModel || resolved.model || 'claude-sonnet-4-20250514';
modelId = resolved.upstreamModel || resolved.model || DEFAULT_MODEL_ID;
}
const provider = resolved.provider;
const protocol = resolved.protocol;
Expand Down Expand Up @@ -465,11 +469,11 @@ function buildResolution(

// Env mode uses short aliases (sonnet/opus/haiku) in the UI.
// Map them to full Anthropic model IDs so toAiSdkConfig can resolve correctly.
const envModels: CatalogModel[] = [
{ modelId: 'sonnet', upstreamModelId: 'claude-sonnet-4-20250514', displayName: 'Sonnet 4.6' },
{ modelId: 'opus', upstreamModelId: 'claude-opus-4-20250514', displayName: 'Opus 4.6' },
{ modelId: 'haiku', upstreamModelId: 'claude-haiku-4-5-20251001', displayName: 'Haiku 4.5' },
];
const envModels: CatalogModel[] = Object.entries(CLAUDE_MODELS).map(([alias, m]) => ({
modelId: alias,
upstreamModelId: m.id,
displayName: m.displayName,
}));

// Resolve upstream model from the alias table
const catalogEntry = model ? envModels.find(m => m.modelId === model) : undefined;
Expand Down
Loading