Merge branch 'main' into adibakm/clear-context-conversation-approval
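For orientation, the hunks below add two methods to UiTelemetryService. Their declarations, copied from the implementation hunk at the end, are:

  clear(newSessionId?: string): void
  hydrate(conversation: ConversationRecord): void

clear() resets the aggregated metrics for a new session, and hydrate() restores them from a recorded conversation when a session is resumed; the first hunk is a small change in ChatRecordingService's session setup.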
@@ -171,6 +171,7 @@ export class ChatRecordingService {
      this.cachedConversation = null;
    } else {
      // Create new session
      this.sessionId = this.config.getSessionId();
      const chatsDir = path.join(
        this.config.storage.getProjectTempDir(),
        'chats',
@@ -15,6 +15,7 @@ import {
  type ApiErrorEvent,
  type ApiResponseEvent,
} from './types.js';
import { type ConversationRecord } from '../services/chatRecordingService.js';
import type {
  CompletedToolCall,
  ErroredToolCall,
@@ -698,6 +699,121 @@ describe('UiTelemetryService', () => {
    });
  });

  describe('clear', () => {
    it('should reset metrics and last prompt token count', () => {
      // Set up initial state with some metrics
      const event = {
        'event.name': EVENT_API_RESPONSE,
        model: 'gemini-2.5-pro',
        duration_ms: 500,
        usage: {
          input_token_count: 100,
          output_token_count: 200,
          total_token_count: 300,
          cached_content_token_count: 50,
          thoughts_token_count: 20,
          tool_token_count: 30,
        },
      } as ApiResponseEvent & { 'event.name': typeof EVENT_API_RESPONSE };

      service.addEvent(event);
      service.setLastPromptTokenCount(123);

      expect(service.getMetrics().models['gemini-2.5-pro']).toBeDefined();
      expect(service.getLastPromptTokenCount()).toBe(123);

      service.clear();

      expect(service.getMetrics().models).toEqual({});
      expect(service.getLastPromptTokenCount()).toBe(0);
    });

    it('should emit clear and update events', () => {
      const clearSpy = vi.fn();
      const updateSpy = vi.fn();
      service.on('clear', clearSpy);
      service.on('update', updateSpy);

      const newSessionId = 'new-session-id';
      service.clear(newSessionId);

      expect(clearSpy).toHaveBeenCalledWith(newSessionId);
      expect(updateSpy).toHaveBeenCalledOnce();
      const { metrics, lastPromptTokenCount } = updateSpy.mock.calls[0][0];
      expect(metrics.models).toEqual({});
      expect(lastPromptTokenCount).toBe(0);
    });
  });

  describe('hydrate', () => {
    it('should aggregate metrics from a ConversationRecord', () => {
      const conversation = {
        sessionId: 'resumed-session',
        messages: [
          {
            type: 'user',
            content: 'Hello',
          },
          {
            type: 'gemini',
            model: 'gemini-1.5-pro',
            tokens: {
              input: 10,
              output: 20,
              total: 30,
              cached: 5,
              thoughts: 2,
              tool: 3,
            },
            toolCalls: [
              { name: 'test_tool', status: 'success' },
              { name: 'test_tool', status: 'error' },
            ],
          },
          {
            type: 'gemini',
            model: 'gemini-1.5-pro',
            tokens: {
              input: 100,
              output: 200,
              total: 300,
              cached: 50,
              thoughts: 20,
              tool: 30,
            },
          },
        ],
      } as unknown as ConversationRecord;

      const clearSpy = vi.fn();
      const updateSpy = vi.fn();
      service.on('clear', clearSpy);
      service.on('update', updateSpy);

      service.hydrate(conversation);

      expect(clearSpy).toHaveBeenCalledWith('resumed-session');
      const metrics = service.getMetrics();
      const modelMetrics = metrics.models['gemini-1.5-pro'];

      expect(modelMetrics).toBeDefined();
      expect(modelMetrics.tokens.prompt).toBe(110); // 10 + 100
      expect(modelMetrics.tokens.candidates).toBe(220); // 20 + 200
      expect(modelMetrics.tokens.cached).toBe(55); // 5 + 50
      expect(modelMetrics.tokens.thoughts).toBe(22); // 2 + 20
      expect(modelMetrics.tokens.tool).toBe(33); // 3 + 30
      expect(modelMetrics.tokens.input).toBe(55); // 110 - 55

      expect(metrics.tools.totalCalls).toBe(2);
      expect(metrics.tools.totalSuccess).toBe(1);
      expect(metrics.tools.totalFail).toBe(1);
      expect(metrics.tools.byName['test_tool'].count).toBe(2);

      expect(service.getLastPromptTokenCount()).toBe(300); // 100 (input) + 200 (output)
      expect(updateSpy).toHaveBeenCalled();
    });
  });

  describe('Tool Call Event with Line Count Metadata', () => {
    it('should aggregate valid line count metadata', () => {
      const toolCall = createFakeCompletedToolCall('test_tool', true, 100);
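Taken together, these tests pin down the observable contract: clear() wipes the aggregated metrics and then emits a 'clear' event followed by an 'update' event, while hydrate() rebuilds the metrics from the ConversationRecord of a resumed session. A minimal sketch of a consumer wired to those events follows; the import path and the idea of a stats panel are assumptions for illustration, not part of this change.

// Sketch only. Assumptions: the import path, and that a service instance is
// available in scope. Event names and payload shape follow the tests above.
import type { UiTelemetryService } from './uiTelemetry.js';

declare const service: UiTelemetryService;

service.on('clear', (newSessionId?: string) => {
  // e.g. blank a session-stats panel when the conversation context is cleared
  console.log(`telemetry cleared, new session: ${newSessionId ?? '(none)'}`);
});

service.on('update', ({ metrics, lastPromptTokenCount }) => {
  // same payload shape the tests destructure from updateSpy.mock.calls[0][0]
  console.log(Object.keys(metrics.models), lastPromptTokenCount);
});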
@@ -16,6 +16,7 @@ import {
} from './types.js';

import { ToolCallDecision } from './tool-call-decision.js';
import { type ConversationRecord } from '../services/chatRecordingService.js';

export type UiEvent =
  | (ApiResponseEvent & { 'event.name': typeof EVENT_API_RESPONSE })
@@ -185,6 +186,96 @@ export class UiTelemetryService extends EventEmitter {
    });
  }

  clear(newSessionId?: string): void {
    this.#metrics = createInitialMetrics();
    this.#lastPromptTokenCount = 0;
    this.emit('clear', newSessionId);
    this.emit('update', {
      metrics: this.#metrics,
      lastPromptTokenCount: this.#lastPromptTokenCount,
    });
  }

  /**
   * Hydrates the telemetry metrics from a historical conversation record.
   * This is used when resuming a session to restore token counts and tool stats.
   */
  hydrate(conversation: ConversationRecord): void {
    this.clear(conversation.sessionId);

    let totalTokensInContext = 0;

    for (const message of conversation.messages) {
      if (message.type === 'gemini') {
        const model = message.model || 'unknown';
        const modelMetrics = this.getOrCreateModelMetrics(model);

        // Restore API request stats
        modelMetrics.api.totalRequests++;

        // Restore token metrics
        if (message.tokens) {
          modelMetrics.tokens.prompt += message.tokens.input;
          modelMetrics.tokens.candidates += message.tokens.output;
          modelMetrics.tokens.total += message.tokens.total;
          modelMetrics.tokens.cached += message.tokens.cached;
          modelMetrics.tokens.thoughts += message.tokens.thoughts || 0;
          modelMetrics.tokens.tool += message.tokens.tool || 0;
          modelMetrics.tokens.input = Math.max(
            0,
            modelMetrics.tokens.prompt - modelMetrics.tokens.cached,
          );

          // The total tokens of the last Gemini message represents the context
          // size at that point in time.
          totalTokensInContext = message.tokens.total;
        }

        // Restore tool metrics
        if (message.toolCalls) {
          for (const toolCall of message.toolCalls) {
            this.#metrics.tools.totalCalls++;
            if (toolCall.status === 'success') {
              this.#metrics.tools.totalSuccess++;
            } else if (toolCall.status === 'error') {
              this.#metrics.tools.totalFail++;
            }

            if (!this.#metrics.tools.byName[toolCall.name]) {
              this.#metrics.tools.byName[toolCall.name] = {
                count: 0,
                success: 0,
                fail: 0,
                durationMs: 0,
                decisions: {
                  [ToolCallDecision.ACCEPT]: 0,
                  [ToolCallDecision.REJECT]: 0,
                  [ToolCallDecision.MODIFY]: 0,
                  [ToolCallDecision.AUTO_ACCEPT]: 0,
                },
              };
            }

            const toolStats = this.#metrics.tools.byName[toolCall.name];
            toolStats.count++;
            if (toolCall.status === 'success') {
              toolStats.success++;
            } else if (toolCall.status === 'error') {
              toolStats.fail++;
            }
          }
        }
      }
    }

    this.#lastPromptTokenCount = totalTokensInContext;

    this.emit('update', {
      metrics: this.#metrics,
      lastPromptTokenCount: this.#lastPromptTokenCount,
    });
  }

  private getOrCreateModelMetrics(modelName: string): ModelMetrics {
    if (!this.#metrics.models[modelName]) {
      this.#metrics.models[modelName] = createInitialModelMetrics();
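Given only the API added above, a rough sketch of how a caller could combine the two methods: clear() when the user wipes the conversation context, hydrate() when an earlier recorded conversation is resumed. The handler names and loadConversationRecord are hypothetical and are not part of this commit; the import paths mirror the ones used in the hunks.

// Sketch only. clear()/hydrate() are the methods added above; everything
// else here (loadConversationRecord, the two handlers) is hypothetical.
import type { ConversationRecord } from '../services/chatRecordingService.js';
import type { UiTelemetryService } from './uiTelemetry.js';

declare const uiTelemetry: UiTelemetryService;
declare function loadConversationRecord(sessionId: string): ConversationRecord;

function onClearContext(newSessionId: string): void {
  // Drops all aggregated metrics and notifies listeners ('clear', then 'update').
  uiTelemetry.clear(newSessionId);
}

function onResumeSession(sessionId: string): void {
  // Rebuilds per-model token counts and tool stats from the recorded messages,
  // and sets lastPromptTokenCount to the last Gemini message's total.
  uiTelemetry.hydrate(loadConversationRecord(sessionId));
}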