fix: clean up /clear and /resume (#22007)

This commit is contained in:
Jack Wotherspoon
2026-03-11 16:23:23 +01:00
committed by GitHub
parent 99bbbc2170
commit b804fe9662
9 changed files with 282 additions and 22 deletions

View File

@@ -17,6 +17,7 @@ vi.mock('@google/gemini-cli-core', async () => {
...actual,
uiTelemetryService: {
setLastPromptTokenCount: vi.fn(),
clear: vi.fn(),
},
};
});
@@ -74,17 +75,16 @@ describe('clearCommand', () => {
expect(mockResetChat).toHaveBeenCalledTimes(1);
expect(mockHintClear).toHaveBeenCalledTimes(1);
expect(uiTelemetryService.setLastPromptTokenCount).toHaveBeenCalledWith(0);
expect(uiTelemetryService.setLastPromptTokenCount).toHaveBeenCalledTimes(1);
expect(uiTelemetryService.clear).toHaveBeenCalled();
expect(uiTelemetryService.clear).toHaveBeenCalledTimes(1);
expect(mockContext.ui.clear).toHaveBeenCalledTimes(1);
// Check the order of operations.
const setDebugMessageOrder = (mockContext.ui.setDebugMessage as Mock).mock
.invocationCallOrder[0];
const resetChatOrder = mockResetChat.mock.invocationCallOrder[0];
const resetTelemetryOrder = (
uiTelemetryService.setLastPromptTokenCount as Mock
).mock.invocationCallOrder[0];
const resetTelemetryOrder = (uiTelemetryService.clear as Mock).mock
.invocationCallOrder[0];
const clearOrder = (mockContext.ui.clear as Mock).mock
.invocationCallOrder[0];
@@ -110,8 +110,8 @@ describe('clearCommand', () => {
'Clearing terminal.',
);
expect(mockResetChat).not.toHaveBeenCalled();
expect(uiTelemetryService.setLastPromptTokenCount).toHaveBeenCalledWith(0);
expect(uiTelemetryService.setLastPromptTokenCount).toHaveBeenCalledTimes(1);
expect(uiTelemetryService.clear).toHaveBeenCalled();
expect(uiTelemetryService.clear).toHaveBeenCalledTimes(1);
expect(nullConfigContext.ui.clear).toHaveBeenCalledTimes(1);
});
});

View File

@@ -23,10 +23,6 @@ export const clearCommand: SlashCommand = {
action: async (context, _args) => {
const geminiClient = context.services.config?.getGeminiClient();
const config = context.services.config;
const chatRecordingService = context.services.config
?.getGeminiClient()
?.getChat()
.getChatRecordingService();
// Fire SessionEnd hook before clearing
const hookSystem = config?.getHookSystem();
@@ -34,6 +30,18 @@ export const clearCommand: SlashCommand = {
await hookSystem.fireSessionEndEvent(SessionEndReason.Clear);
}
// Reset user steering hints
config?.userHintService.clear();
// Start a new conversation recording with a new session ID
// We MUST do this before calling resetChat() so the new ChatRecordingService
// initialized by GeminiChat picks up the new session ID.
let newSessionId: string | undefined;
if (config) {
newSessionId = randomUUID();
config.setSessionId(newSessionId);
}
if (geminiClient) {
context.ui.setDebugMessage('Clearing terminal and resetting chat.');
// If resetChat fails, the exception will propagate and halt the command,
@@ -43,16 +51,6 @@ export const clearCommand: SlashCommand = {
context.ui.setDebugMessage('Clearing terminal.');
}
// Reset user steering hints
config?.userHintService.clear();
// Start a new conversation recording with a new session ID
if (config && chatRecordingService) {
const newSessionId = randomUUID();
config.setSessionId(newSessionId);
chatRecordingService.initialize();
}
// Fire SessionStart hook after clearing
let result;
if (hookSystem) {
@@ -69,7 +67,7 @@ export const clearCommand: SlashCommand = {
await flushTelemetry(config);
}
uiTelemetryService.setLastPromptTokenCount(0);
uiTelemetryService.clear(newSessionId);
context.ui.clear();
if (result?.systemMessage) {

View File

@@ -238,6 +238,34 @@ describe('SessionStatsContext', () => {
unmount();
});
// Verifies the provider reacts to the service-level 'clear' event (emitted by
// /clear and /resume): stats adopt the new session id, the prompt count is
// zeroed, and the session start time is refreshed.
it('should update session ID and reset stats when the uiTelemetryService emits a clear event', () => {
const contextRef: MutableRefObject<
ReturnType<typeof useSessionStats> | undefined
> = { current: undefined };
const { unmount } = render(
<SessionStatsProvider>
<TestHarness contextRef={contextRef} />
</SessionStatsProvider>,
);
// Capture the pre-clear start time so we can assert it only moves forward.
const initialStartTime = contextRef.current?.stats.sessionStartTime;
const newSessionId = 'new-session-id';
act(() => {
// Simulate /clear assigning a fresh session id to the telemetry service.
uiTelemetryService.emit('clear', newSessionId);
});
const stats = contextRef.current?.stats;
expect(stats?.sessionId).toBe(newSessionId);
expect(stats?.promptCount).toBe(0);
// >= rather than > because the clock may not tick between render and emit.
expect(stats?.sessionStartTime.getTime()).toBeGreaterThanOrEqual(
initialStartTime!.getTime(),
);
unmount();
});
it('should throw an error when useSessionStats is used outside of a provider', () => {
const onError = vi.fn();
// Suppress console.error from React for this test

View File

@@ -216,7 +216,17 @@ export const SessionStatsProvider: React.FC<{ children: React.ReactNode }> = ({
});
};
// Reset per-session stats when the telemetry service is cleared (e.g. via
// /clear). A fresh, non-empty session id replaces the current one; otherwise
// the existing id is kept. The start time and prompt count always reset.
const handleClear = (nextSessionId?: string) => {
  setStats((prev) => {
    const sessionId = nextSessionId || prev.sessionId;
    return {
      ...prev,
      sessionId,
      sessionStartTime: new Date(),
      promptCount: 0,
    };
  });
};
uiTelemetryService.on('update', handleUpdate);
uiTelemetryService.on('clear', handleClear);
// Set initial state
handleUpdate({
metrics: uiTelemetryService.getMetrics(),
@@ -225,6 +235,7 @@ export const SessionStatsProvider: React.FC<{ children: React.ReactNode }> = ({
return () => {
uiTelemetryService.off('update', handleUpdate);
uiTelemetryService.off('clear', handleClear);
};
}, []);

View File

@@ -23,6 +23,7 @@ import {
import {
coreEvents,
convertSessionToClientHistory,
uiTelemetryService,
} from '@google/gemini-cli-core';
// Mock modules
@@ -36,6 +37,17 @@ vi.mock('../../utils/sessionUtils.js', async (importOriginal) => {
getSessionFiles: vi.fn(),
};
});
// Replace the real uiTelemetryService with spies so the hook's calls to
// clear()/hydrate() can be asserted without mutating live telemetry state;
// every other export of the module passes through unchanged.
vi.mock('@google/gemini-cli-core', async (importOriginal) => {
  const realModule =
    await importOriginal<typeof import('@google/gemini-cli-core')>();
  const telemetryStub = {
    clear: vi.fn(),
    hydrate: vi.fn(),
  };
  return { ...realModule, uiTelemetryService: telemetryStub };
});
const MOCKED_PROJECT_TEMP_DIR = '/test/project/temp';
const MOCKED_CHATS_DIR = '/test/project/temp/chats';
@@ -102,6 +114,7 @@ describe('useSessionBrowser', () => {
expect(mockConfig.setSessionId).toHaveBeenCalledWith(
'existing-session-456',
);
expect(uiTelemetryService.hydrate).toHaveBeenCalledWith(mockConversation);
expect(result.current.isSessionBrowserOpen).toBe(false);
expect(mockOnLoadHistory).toHaveBeenCalled();
});

View File

@@ -16,6 +16,7 @@ import type {
import {
coreEvents,
convertSessionToClientHistory,
uiTelemetryService,
} from '@google/gemini-cli-core';
import type { SessionInfo } from '../../utils/sessionUtils.js';
import { convertSessionToHistoryFormats } from '../../utils/sessionUtils.js';
@@ -68,6 +69,7 @@ export const useSessionBrowser = (
// Use the old session's ID to continue it.
const existingSessionId = conversation.sessionId;
config.setSessionId(existingSessionId);
uiTelemetryService.hydrate(conversation);
const resumedSessionData = {
conversation,

View File

@@ -171,6 +171,7 @@ export class ChatRecordingService {
this.cachedConversation = null;
} else {
// Create new session
this.sessionId = this.config.getSessionId();
const chatsDir = path.join(
this.config.storage.getProjectTempDir(),
'chats',

View File

@@ -15,6 +15,7 @@ import {
type ApiErrorEvent,
type ApiResponseEvent,
} from './types.js';
import { type ConversationRecord } from '../services/chatRecordingService.js';
import type {
CompletedToolCall,
ErroredToolCall,
@@ -698,6 +699,121 @@ describe('UiTelemetryService', () => {
});
});
describe('clear', () => {
// clear() must wipe both the aggregated model metrics and the last prompt
// token count back to their initial (empty/zero) state.
it('should reset metrics and last prompt token count', () => {
// Set up initial state with some metrics
const event = {
'event.name': EVENT_API_RESPONSE,
model: 'gemini-2.5-pro',
duration_ms: 500,
usage: {
input_token_count: 100,
output_token_count: 200,
total_token_count: 300,
cached_content_token_count: 50,
thoughts_token_count: 20,
tool_token_count: 30,
},
} as ApiResponseEvent & { 'event.name': typeof EVENT_API_RESPONSE };
service.addEvent(event);
service.setLastPromptTokenCount(123);
// Sanity-check the state is non-empty before clearing.
expect(service.getMetrics().models['gemini-2.5-pro']).toBeDefined();
expect(service.getLastPromptTokenCount()).toBe(123);
service.clear();
expect(service.getMetrics().models).toEqual({});
expect(service.getLastPromptTokenCount()).toBe(0);
});
// clear() must notify listeners: a 'clear' event carrying the new session id
// followed by a single 'update' event carrying the zeroed state.
it('should emit clear and update events', () => {
const clearSpy = vi.fn();
const updateSpy = vi.fn();
service.on('clear', clearSpy);
service.on('update', updateSpy);
const newSessionId = 'new-session-id';
service.clear(newSessionId);
expect(clearSpy).toHaveBeenCalledWith(newSessionId);
expect(updateSpy).toHaveBeenCalledOnce();
// The update payload reflects the already-reset metrics.
const { metrics, lastPromptTokenCount } = updateSpy.mock.calls[0][0];
expect(metrics.models).toEqual({});
expect(lastPromptTokenCount).toBe(0);
});
});
describe('hydrate', () => {
// hydrate() rebuilds telemetry from a saved conversation when a session is
// resumed: per-model token sums, tool-call tallies, and the context size
// taken from the last gemini message.
it('should aggregate metrics from a ConversationRecord', () => {
const conversation = {
sessionId: 'resumed-session',
messages: [
{
// Non-gemini messages carry no token/tool data and must be skipped.
type: 'user',
content: 'Hello',
},
{
type: 'gemini',
model: 'gemini-1.5-pro',
tokens: {
input: 10,
output: 20,
total: 30,
cached: 5,
thoughts: 2,
tool: 3,
},
toolCalls: [
{ name: 'test_tool', status: 'success' },
{ name: 'test_tool', status: 'error' },
],
},
{
type: 'gemini',
model: 'gemini-1.5-pro',
tokens: {
input: 100,
output: 200,
total: 300,
cached: 50,
thoughts: 20,
tool: 30,
},
},
],
} as unknown as ConversationRecord;
const clearSpy = vi.fn();
const updateSpy = vi.fn();
service.on('clear', clearSpy);
service.on('update', updateSpy);
service.hydrate(conversation);
// hydrate() clears first, passing the resumed session id through.
expect(clearSpy).toHaveBeenCalledWith('resumed-session');
const metrics = service.getMetrics();
const modelMetrics = metrics.models['gemini-1.5-pro'];
expect(modelMetrics).toBeDefined();
// Token fields are the sums across both gemini messages.
expect(modelMetrics.tokens.prompt).toBe(110); // 10 + 100
expect(modelMetrics.tokens.candidates).toBe(220); // 20 + 200
expect(modelMetrics.tokens.cached).toBe(55); // 5 + 50
expect(modelMetrics.tokens.thoughts).toBe(22); // 2 + 20
expect(modelMetrics.tokens.tool).toBe(33); // 3 + 30
expect(modelMetrics.tokens.input).toBe(55); // 110 - 55
// Tool tallies: one success, one error, both under the same tool name.
expect(metrics.tools.totalCalls).toBe(2);
expect(metrics.tools.totalSuccess).toBe(1);
expect(metrics.tools.totalFail).toBe(1);
expect(metrics.tools.byName['test_tool'].count).toBe(2);
// Context size is the last gemini message's total, not a sum.
expect(service.getLastPromptTokenCount()).toBe(300); // 100 (input) + 200 (output)
expect(updateSpy).toHaveBeenCalled();
});
});
describe('Tool Call Event with Line Count Metadata', () => {
it('should aggregate valid line count metadata', () => {
const toolCall = createFakeCompletedToolCall('test_tool', true, 100);

View File

@@ -16,6 +16,7 @@ import {
} from './types.js';
import { ToolCallDecision } from './tool-call-decision.js';
import { type ConversationRecord } from '../services/chatRecordingService.js';
export type UiEvent =
| (ApiResponseEvent & { 'event.name': typeof EVENT_API_RESPONSE })
@@ -185,6 +186,96 @@ export class UiTelemetryService extends EventEmitter {
});
}
/**
 * Resets all aggregated metrics and the last prompt token count.
 *
 * Emits 'clear' (carrying the optional new session id, so listeners such as
 * SessionStatsProvider can adopt it) followed by 'update' with the zeroed
 * state. The emit order matters: consumers rely on 'clear' arriving first.
 */
clear(newSessionId?: string): void {
  this.#metrics = createInitialMetrics();
  this.#lastPromptTokenCount = 0;
  this.emit('clear', newSessionId);
  const snapshot = {
    metrics: this.#metrics,
    lastPromptTokenCount: this.#lastPromptTokenCount,
  };
  this.emit('update', snapshot);
}
/**
 * Hydrates the telemetry metrics from a historical conversation record.
 * This is used when resuming a session to restore token counts and tool stats.
 *
 * Emits 'clear' (via clear(), carrying the resumed session id) and then a
 * single 'update' with the fully re-aggregated metrics.
 */
hydrate(conversation: ConversationRecord): void {
  // Start from a blank slate; this also notifies listeners of the session id.
  this.clear(conversation.sessionId);

  // The total token count of the LAST gemini message approximates the context
  // size at the moment the session was recorded.
  let totalTokensInContext = 0;

  for (const message of conversation.messages) {
    // Only gemini messages carry token usage and tool-call records.
    if (message.type !== 'gemini') {
      continue;
    }
    const model = message.model || 'unknown';
    const modelMetrics = this.getOrCreateModelMetrics(model);

    // Each recorded gemini message corresponds to one API request.
    modelMetrics.api.totalRequests++;

    // Restore token metrics.
    if (message.tokens) {
      modelMetrics.tokens.prompt += message.tokens.input;
      modelMetrics.tokens.candidates += message.tokens.output;
      modelMetrics.tokens.total += message.tokens.total;
      modelMetrics.tokens.cached += message.tokens.cached;
      modelMetrics.tokens.thoughts += message.tokens.thoughts || 0;
      modelMetrics.tokens.tool += message.tokens.tool || 0;
      // 'input' tracks non-cached prompt tokens; clamp at zero in case a
      // malformed record reports more cached than prompt tokens.
      modelMetrics.tokens.input = Math.max(
        0,
        modelMetrics.tokens.prompt - modelMetrics.tokens.cached,
      );
      totalTokensInContext = message.tokens.total;
    }

    // Restore tool metrics.
    if (message.toolCalls) {
      for (const toolCall of message.toolCalls) {
        this.#recordHydratedToolCall(toolCall.name, toolCall.status);
      }
    }
  }

  this.#lastPromptTokenCount = totalTokensInContext;
  this.emit('update', {
    metrics: this.#metrics,
    lastPromptTokenCount: this.#lastPromptTokenCount,
  });
}

/**
 * Folds one recorded tool call into both the aggregate and per-tool stats.
 * Statuses other than 'success'/'error' (e.g. a canceled call) count toward
 * call totals but toward neither success nor failure.
 */
#recordHydratedToolCall(name: string, status: string): void {
  const tools = this.#metrics.tools;
  tools.byName[name] ??= {
    count: 0,
    success: 0,
    fail: 0,
    durationMs: 0,
    decisions: {
      [ToolCallDecision.ACCEPT]: 0,
      [ToolCallDecision.REJECT]: 0,
      [ToolCallDecision.MODIFY]: 0,
      [ToolCallDecision.AUTO_ACCEPT]: 0,
    },
  };
  const toolStats = tools.byName[name];
  tools.totalCalls++;
  toolStats.count++;
  if (status === 'success') {
    tools.totalSuccess++;
    toolStats.success++;
  } else if (status === 'error') {
    tools.totalFail++;
    toolStats.fail++;
  }
}
private getOrCreateModelMetrics(modelName: string): ModelMetrics {
if (!this.#metrics.models[modelName]) {
this.#metrics.models[modelName] = createInitialModelMetrics();