Skip to content

Commit 4dfd7ce

Browse files
fix(stats): filter zero-token models
Avoid showing models in dashboard stats when a session recorded no actual token usage, while still keeping the rest of the session counts intact.

Co-Authored-By: chatgpt-codex-connector[bot] <199175422+chatgpt-codex-connector[bot]@users.noreply.github.com>
1 parent 8145616 commit 4dfd7ce

2 files changed

Lines changed: 47 additions & 3 deletions

File tree

packages/server/src/services/stats.test.ts

Lines changed: 40 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -225,4 +225,44 @@ describe("scanStats", () => {
225225
const second = await runEffect(scanStats(registry));
226226
expect(second.inputTokens).toBe(999);
227227
});
228+
229+
test("filters models with zero total tokens from stats", async () => {
230+
const registry = new PluginRegistry();
231+
const zeroUsageSession = makeSession("s1", "project-1", isoDaysAgo(0), "gpt-5", 0, 0);
232+
const [, assistantTurn] = zeroUsageSession.turns;
233+
if (assistantTurn?.kind === "assistant" && assistantTurn.usage) {
234+
assistantTurn.usage.cacheReadTokens = 0;
235+
assistantTurn.usage.cacheCreationTokens = 0;
236+
}
237+
238+
const countedSession = makeSession("s2", "project-1", isoDaysAgo(0), "claude-opus", 10, 5);
239+
240+
const list: SessionSummary[] = [
241+
{
242+
sessionId: "s1",
243+
timestamp: zeroUsageSession.turns[0]?.timestamp ?? "",
244+
slug: "s1",
245+
firstMessage: "session 1",
246+
model: "gpt-5",
247+
gitBranch: "main",
248+
},
249+
{
250+
sessionId: "s2",
251+
timestamp: countedSession.turns[0]?.timestamp ?? "",
252+
slug: "s2",
253+
firstMessage: "session 2",
254+
model: "claude-opus",
255+
gitBranch: "main",
256+
},
257+
];
258+
259+
registry.register(createMockPlugin({ s1: zeroUsageSession, s2: countedSession }, list), testConfig);
260+
261+
const stats = await runEffect(scanStats(registry));
262+
expect(stats.sessions).toBe(2);
263+
expect(stats.messages).toBe(4);
264+
expect(stats.toolCalls).toBe(2);
265+
expect(stats.models["gpt-5"]).toBeUndefined();
266+
expect(stats.models["claude-opus"]?.inputTokens).toBe(10);
267+
});
228268
});

packages/server/src/services/stats.ts

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -152,6 +152,10 @@ function applyUsageStats(stats: DashboardStats, modelUsage: ModelTokenUsage, usa
152152
modelUsage.cacheCreationTokens += usage.cacheCreationTokens ?? 0;
153153
}
154154

155+
function totalUsageTokens(usage: TokenUsage): number {
156+
return usage.inputTokens + usage.outputTokens + (usage.cacheReadTokens ?? 0) + (usage.cacheCreationTokens ?? 0);
157+
}
158+
155159
function applyTurnStats(stats: DashboardStats, turns: Turn[], fallbackModel: string): void {
156160
stats.messages += countVisibleMessages(turns);
157161

@@ -161,11 +165,11 @@ function applyTurnStats(stats: DashboardStats, turns: Turn[], fallbackModel: str
161165
}
162166

163167
stats.toolCalls += turn.contentBlocks.filter((block) => block.type === "tool_call").length;
164-
const modelUsage = ensureModelUsage(stats.models, turn.model || fallbackModel || "unknown");
165-
166-
if (!turn.usage) {
168+
if (!turn.usage || totalUsageTokens(turn.usage) <= 0) {
167169
continue;
168170
}
171+
172+
const modelUsage = ensureModelUsage(stats.models, turn.model || fallbackModel || "unknown");
169173
applyUsageStats(stats, modelUsage, turn.usage);
170174
}
171175
}

0 commit comments

Comments (0)