Show raw input token counts in JSON output. (#15021)

Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
This commit is contained in:
Jacob Richman
2025-12-15 18:47:39 -08:00
committed by GitHub
parent bb0c0d8ee3
commit 79f664d593
17 changed files with 189 additions and 129 deletions

View File

@@ -87,6 +87,7 @@ describe('<ModelStatsDisplay />', () => {
'gemini-2.5-pro': {
api: { totalRequests: 1, totalErrors: 0, totalLatencyMs: 100 },
tokens: {
input: 10,
prompt: 10,
candidates: 20,
total: 30,
@@ -128,6 +129,7 @@ describe('<ModelStatsDisplay />', () => {
'gemini-2.5-pro': {
api: { totalRequests: 1, totalErrors: 0, totalLatencyMs: 100 },
tokens: {
input: 5,
prompt: 10,
candidates: 20,
total: 30,
@@ -139,6 +141,7 @@ describe('<ModelStatsDisplay />', () => {
'gemini-2.5-flash': {
api: { totalRequests: 1, totalErrors: 0, totalLatencyMs: 50 },
tokens: {
input: 5,
prompt: 5,
candidates: 10,
total: 15,
@@ -180,6 +183,7 @@ describe('<ModelStatsDisplay />', () => {
'gemini-2.5-pro': {
api: { totalRequests: 10, totalErrors: 1, totalLatencyMs: 1000 },
tokens: {
input: 50,
prompt: 100,
candidates: 200,
total: 300,
@@ -191,6 +195,7 @@ describe('<ModelStatsDisplay />', () => {
'gemini-2.5-flash': {
api: { totalRequests: 20, totalErrors: 2, totalLatencyMs: 500 },
tokens: {
input: 100,
prompt: 200,
candidates: 400,
total: 600,
@@ -235,6 +240,7 @@ describe('<ModelStatsDisplay />', () => {
totalLatencyMs: 9876,
},
tokens: {
input: 987654321 - 123456789,
prompt: 987654321,
candidates: 123456789,
total: 999999999,
@@ -272,6 +278,7 @@ describe('<ModelStatsDisplay />', () => {
'gemini-2.5-pro': {
api: { totalRequests: 1, totalErrors: 0, totalLatencyMs: 100 },
tokens: {
input: 5,
prompt: 10,
candidates: 20,
total: 30,

View File

@@ -170,7 +170,7 @@ export const ModelStatsDisplay: React.FC = () => {
isSubtle
values={getModelValues((m) => (
<Text color={theme.text.primary}>
{Math.max(0, m.tokens.prompt - m.tokens.cached).toLocaleString()}
{m.tokens.input.toLocaleString()}
</Text>
))}
/>

View File

@@ -45,6 +45,7 @@ describe('<SessionSummaryDisplay />', () => {
'gemini-2.5-pro': {
api: { totalRequests: 10, totalErrors: 1, totalLatencyMs: 50234 },
tokens: {
input: 500,
prompt: 1000,
candidates: 2000,
total: 3500,

View File

@@ -85,6 +85,7 @@ describe('<StatsDisplay />', () => {
'gemini-2.5-pro': {
api: { totalRequests: 3, totalErrors: 0, totalLatencyMs: 15000 },
tokens: {
input: 500,
prompt: 1000,
candidates: 2000,
total: 43234,
@@ -96,6 +97,7 @@ describe('<StatsDisplay />', () => {
'gemini-2.5-flash': {
api: { totalRequests: 5, totalErrors: 1, totalLatencyMs: 4500 },
tokens: {
input: 15000,
prompt: 25000,
candidates: 15000,
total: 150000000,
@@ -123,6 +125,7 @@ describe('<StatsDisplay />', () => {
'gemini-2.5-pro': {
api: { totalRequests: 1, totalErrors: 0, totalLatencyMs: 100 },
tokens: {
input: 50,
prompt: 100,
candidates: 100,
total: 250,
@@ -216,6 +219,7 @@ describe('<StatsDisplay />', () => {
'gemini-2.5-pro': {
api: { totalRequests: 1, totalErrors: 0, totalLatencyMs: 100 },
tokens: {
input: 100,
prompt: 100,
candidates: 100,
total: 200,
@@ -398,6 +402,7 @@ describe('<StatsDisplay />', () => {
'gemini-2.5-pro': {
api: { totalRequests: 1, totalErrors: 0, totalLatencyMs: 100 },
tokens: {
input: 50,
prompt: 100,
candidates: 100,
total: 250,

View File

@@ -85,15 +85,13 @@ const buildModelRows = (
const activeRows = Object.entries(models).map(([name, metrics]) => {
const modelName = getBaseModelName(name);
const cachedTokens = metrics.tokens.cached;
const totalInputTokens = metrics.tokens.prompt;
const uncachedTokens = Math.max(0, totalInputTokens - cachedTokens);
const inputTokens = metrics.tokens.input;
return {
key: name,
modelName,
requests: metrics.api.totalRequests,
cachedTokens: cachedTokens.toLocaleString(),
uncachedTokens: uncachedTokens.toLocaleString(),
totalInputTokens: totalInputTokens.toLocaleString(),
inputTokens: inputTokens.toLocaleString(),
outputTokens: metrics.tokens.candidates.toLocaleString(),
bucket: quotas?.buckets?.find((b) => b.modelId === modelName),
isActive: true,
@@ -114,8 +112,7 @@ const buildModelRows = (
modelName: bucket.modelId!,
requests: '-',
cachedTokens: '-',
uncachedTokens: '-',
totalInputTokens: '-',
inputTokens: '-',
outputTokens: '-',
bucket,
isActive: false,
@@ -290,7 +287,7 @@ const ModelUsageTable: React.FC<{
row.isActive ? theme.text.primary : theme.text.secondary
}
>
{row.uncachedTokens}
{row.inputTokens}
</Text>
</Box>
<Box