Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 20 additions & 0 deletions src/ccstatusline.ts
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,14 @@ import {
} from './utils/speed-window';
import { prefetchUsageDataIfNeeded } from './utils/usage-prefetch';

/**
 * Derives a human-readable display name from a Claude model id.
 *
 * Example: 'claude-sonnet-4-5-20250929' -> 'Sonnet 4.5'
 *
 * Steps: drop the 'claude-' vendor prefix, strip a trailing 8-digit
 * date stamp, join adjacent version digits with a dot ('4-5' -> '4.5'
 * — previously these became two words, e.g. "Sonnet 4 5"), replace the
 * remaining hyphens with spaces, and title-case each word.
 */
function formatModelDisplayName(modelId: string): string {
    return modelId
        .replace(/^claude-/, '')
        .replace(/-(\d{8})$/, '')
        // digit-hyphen-digit is a dotted version number, not two words
        .replace(/(\d)-(?=\d)/g, '$1.')
        .replace(/-/g, ' ')
        .replace(/\b\w/g, c => c.toUpperCase());
}

function hasSessionDurationInStatusJson(data: StatusJSON): boolean {
const durationMs = data.cost?.total_duration_ms;
return typeof durationMs === 'number' && Number.isFinite(durationMs) && durationMs >= 0;
Expand Down Expand Up @@ -114,6 +122,18 @@ async function renderMultipleLines(data: StatusJSON) {
let tokenMetrics: TokenMetrics | null = null;
if (data.transcript_path) {
tokenMetrics = await getTokenMetrics(data.transcript_path);

if (tokenMetrics.model) {
const stdinModelId = typeof data.model === 'string'
? data.model
: data.model?.id;
if (stdinModelId !== tokenMetrics.model) {
data.model = {
id: tokenMetrics.model,
display_name: formatModelDisplayName(tokenMetrics.model)
};
}
}
}

let sessionDuration: string | null = null;
Expand Down
3 changes: 2 additions & 1 deletion src/types/TokenMetrics.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ export interface TokenUsage {
}

export interface TranscriptLine {
message?: { usage?: TokenUsage };
message?: { usage?: TokenUsage; model?: string };
isSidechain?: boolean;
timestamp?: string;
isApiErrorMessage?: boolean;
Expand All @@ -19,4 +19,5 @@ export interface TokenMetrics {
cachedTokens: number;
totalTokens: number;
contextLength: number;
model: string | null;
}
27 changes: 18 additions & 9 deletions src/utils/__tests__/context-percentage.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,8 @@ describe('calculateContextPercentage', () => {
outputTokens: 0,
cachedTokens: 0,
totalTokens: 0,
contextLength: 100000
contextLength: 100000,
model: null
}
};

Expand Down Expand Up @@ -61,7 +62,8 @@ describe('calculateContextPercentage', () => {
outputTokens: 0,
cachedTokens: 0,
totalTokens: 0,
contextLength: 42000
contextLength: 42000,
model: null
}
};

Expand All @@ -79,7 +81,8 @@ describe('calculateContextPercentage', () => {
outputTokens: 0,
cachedTokens: 0,
totalTokens: 0,
contextLength: 42000
contextLength: 42000,
model: null
}
};

Expand All @@ -95,7 +98,8 @@ describe('calculateContextPercentage', () => {
outputTokens: 0,
cachedTokens: 0,
totalTokens: 0,
contextLength: 2000000
contextLength: 2000000,
model: null
}
};

Expand All @@ -111,7 +115,8 @@ describe('calculateContextPercentage', () => {
outputTokens: 0,
cachedTokens: 0,
totalTokens: 0,
contextLength: 42000
contextLength: 42000,
model: null
}
};

Expand All @@ -127,7 +132,8 @@ describe('calculateContextPercentage', () => {
outputTokens: 0,
cachedTokens: 0,
totalTokens: 0,
contextLength: 42000
contextLength: 42000,
model: null
}
};

Expand All @@ -148,7 +154,8 @@ describe('calculateContextPercentage', () => {
outputTokens: 0,
cachedTokens: 0,
totalTokens: 0,
contextLength: 42000
contextLength: 42000,
model: null
}
};

Expand All @@ -166,7 +173,8 @@ describe('calculateContextPercentage', () => {
outputTokens: 0,
cachedTokens: 0,
totalTokens: 0,
contextLength: 42000
contextLength: 42000,
model: null
}
};

Expand All @@ -188,7 +196,8 @@ describe('calculateContextPercentage', () => {
outputTokens: 0,
cachedTokens: 0,
totalTokens: 0,
contextLength: 42000
contextLength: 42000,
model: null
}
};

Expand Down
79 changes: 76 additions & 3 deletions src/utils/__tests__/jsonl-metrics.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ function makeTranscriptLine(params: {
output?: number;
isSidechain?: boolean;
isApiErrorMessage?: boolean;
model?: string;
}): string {
return JSON.stringify({
timestamp: params.timestamp,
Expand All @@ -57,7 +58,8 @@ function makeTranscriptLine(params: {
usage: {
input_tokens: params.input ?? 0,
output_tokens: params.output ?? 0
}
},
model: params.model
}
: undefined
});
Expand Down Expand Up @@ -155,7 +157,8 @@ describe('jsonl transcript metrics', () => {
outputTokens: 141,
cachedTokens: 92,
totalTokens: 2032,
contextLength: 250
contextLength: 250,
model: null
});
});

Expand All @@ -166,10 +169,80 @@ describe('jsonl transcript metrics', () => {
outputTokens: 0,
cachedTokens: 0,
totalTokens: 0,
contextLength: 0
contextLength: 0,
model: null
});
});

it('returns the model from the most recent assistant entry', async () => {
    // Fresh temp dir per test; collected in tempRoots for cleanup.
    const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'ccstatusline-jsonl-metrics-'));
    tempRoots.push(dir);
    const transcript = path.join(dir, 'model.jsonl');

    const entries = [
        makeTranscriptLine({
            timestamp: '2026-01-01T10:00:00.000Z',
            type: 'user'
        }),
        makeTranscriptLine({
            timestamp: '2026-01-01T10:00:05.000Z',
            type: 'assistant',
            input: 100,
            output: 50,
            model: 'claude-sonnet-4-5-20250929'
        })
    ];
    fs.writeFileSync(transcript, entries.join('\n'));

    const { model } = await getTokenMetrics(transcript);
    expect(model).toBe('claude-sonnet-4-5-20250929');
});

it('returns null model when the last entry is a user message (API in progress)', async () => {
    // Fresh temp dir per test; collected in tempRoots for cleanup.
    const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'ccstatusline-jsonl-metrics-'));
    tempRoots.push(dir);
    const transcript = path.join(dir, 'model-pending.jsonl');

    // Trailing user entry simulates an in-flight API call: the model from
    // the earlier assistant entry must not be reported.
    const entries = [
        makeTranscriptLine({
            timestamp: '2026-01-01T10:00:00.000Z',
            type: 'user'
        }),
        makeTranscriptLine({
            timestamp: '2026-01-01T10:00:05.000Z',
            type: 'assistant',
            input: 100,
            output: 50,
            model: 'claude-sonnet-4-5-20250929'
        }),
        makeTranscriptLine({
            timestamp: '2026-01-01T10:01:00.000Z',
            type: 'user'
        })
    ];
    fs.writeFileSync(transcript, entries.join('\n'));

    const { model } = await getTokenMetrics(transcript);
    expect(model).toBeNull();
});

it('returns null model when transcript has no model field', async () => {
    // Fresh temp dir per test; collected in tempRoots for cleanup.
    const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'ccstatusline-jsonl-metrics-'));
    tempRoots.push(dir);
    const transcript = path.join(dir, 'no-model.jsonl');

    // Assistant entry carries usage but no model id.
    const entries = [
        makeTranscriptLine({
            timestamp: '2026-01-01T10:00:00.000Z',
            type: 'user'
        }),
        makeTranscriptLine({
            timestamp: '2026-01-01T10:00:05.000Z',
            type: 'assistant',
            input: 100,
            output: 50
        })
    ];
    fs.writeFileSync(transcript, entries.join('\n'));

    const { model } = await getTokenMetrics(transcript);
    expect(model).toBeNull();
});

it('calculates speed metrics from user-to-assistant processing windows', async () => {
const root = fs.mkdtempSync(path.join(os.tmpdir(), 'ccstatusline-jsonl-speed-'));
tempRoots.push(root);
Expand Down
28 changes: 25 additions & 3 deletions src/utils/jsonl-metrics.ts
Original file line number Diff line number Diff line change
Expand Up @@ -152,7 +152,7 @@ export async function getTokenMetrics(transcriptPath: string): Promise<TokenMetr
try {
// Use Node.js-compatible file reading
if (!fs.existsSync(transcriptPath)) {
return { inputTokens: 0, outputTokens: 0, cachedTokens: 0, totalTokens: 0, contextLength: 0 };
return { inputTokens: 0, outputTokens: 0, cachedTokens: 0, totalTokens: 0, contextLength: 0, model: null };
}

const lines = await readJsonlLines(transcriptPath);
Expand Down Expand Up @@ -194,11 +194,33 @@ export async function getTokenMetrics(transcriptPath: string): Promise<TokenMetr
+ (usage.cache_creation_input_tokens ?? 0);
}

// Check if last entry is a user message (API call may be in progress).
// In that case, don't report the model — it may be stale if the user
// just changed models. After the response, the transcript catches up.
let model: string | null = null;
if (mostRecentMainChainEntry?.message?.model) {
let lastEntryIsUser = false;
for (let i = lines.length - 1; i >= 0; i--) {
const line = lines[i];
if (!line) {
continue;
}
const last = parseJsonlLine(line) as TranscriptLine | null;
if (last?.type === 'assistant' || last?.type === 'user') {
lastEntryIsUser = last.type === 'user';
break;
}
}
if (!lastEntryIsUser) {
model = mostRecentMainChainEntry.message.model;
}
}

const totalTokens = inputTokens + outputTokens + cachedTokens;

return { inputTokens, outputTokens, cachedTokens, totalTokens, contextLength };
return { inputTokens, outputTokens, cachedTokens, totalTokens, contextLength, model };
} catch {
return { inputTokens: 0, outputTokens: 0, cachedTokens: 0, totalTokens: 0, contextLength: 0 };
return { inputTokens: 0, outputTokens: 0, cachedTokens: 0, totalTokens: 0, contextLength: 0, model: null };
}
}

Expand Down
9 changes: 6 additions & 3 deletions src/widgets/__tests__/ContextBar.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,8 @@ describe('ContextBarWidget', () => {
outputTokens: 0,
cachedTokens: 0,
totalTokens: 0,
contextLength: 50000
contextLength: 50000,
model: null
}
};
const widget = new ContextBarWidget();
Expand All @@ -65,7 +66,8 @@ describe('ContextBarWidget', () => {
outputTokens: 0,
cachedTokens: 0,
totalTokens: 0,
contextLength: 50000
contextLength: 50000,
model: null
}
};
const widget = new ContextBarWidget();
Expand All @@ -81,7 +83,8 @@ describe('ContextBarWidget', () => {
outputTokens: 0,
cachedTokens: 0,
totalTokens: 0,
contextLength: 50000
contextLength: 50000,
model: null
}
};
const widget = new ContextBarWidget();
Expand Down
6 changes: 4 additions & 2 deletions src/widgets/__tests__/ContextPercentage.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,8 @@ function render(modelId: string | undefined, contextLength: number, rawValue = f
outputTokens: 0,
cachedTokens: 0,
totalTokens: 0,
contextLength
contextLength,
model: null
}
};
const item: WidgetItem = {
Expand Down Expand Up @@ -72,7 +73,8 @@ describe('ContextPercentageWidget', () => {
outputTokens: 0,
cachedTokens: 0,
totalTokens: 0,
contextLength: 100000
contextLength: 100000,
model: null
}
};

Expand Down
9 changes: 6 additions & 3 deletions src/widgets/__tests__/ContextPercentageUsable.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,8 @@ function render(modelId: string | undefined, contextLength: number, rawValue = f
outputTokens: 0,
cachedTokens: 0,
totalTokens: 0,
contextLength
contextLength,
model: null
}
};
const item: WidgetItem = {
Expand Down Expand Up @@ -76,7 +77,8 @@ describe('ContextPercentageUsableWidget', () => {
outputTokens: 0,
cachedTokens: 0,
totalTokens: 0,
contextLength: 200000
contextLength: 200000,
model: null
}
};

Expand All @@ -99,7 +101,8 @@ describe('ContextPercentageUsableWidget', () => {
outputTokens: 0,
cachedTokens: 0,
totalTokens: 0,
contextLength: 42000
contextLength: 42000,
model: null
}
};

Expand Down
Loading