Add low/full CLI error verbosity mode for cleaner UI (#20399)

This commit is contained in:
Dmitry Lyalin
2026-02-27 14:15:10 -05:00
committed by GitHub
parent 1c8951334a
commit 7f8ce8657c
25 changed files with 689 additions and 32 deletions

View File

@@ -413,6 +413,7 @@ async function readMcpResources(
name: `resources/read (${resource.serverName})`,
description: resource.uri,
status: CoreToolCallStatus.Success,
isClientInitiated: true,
resultDisplay: `Successfully read resource ${resource.uri}`,
confirmationDetails: undefined,
} as IndividualToolCallDisplay,
@@ -427,6 +428,7 @@ async function readMcpResources(
name: `resources/read (${resource.serverName})`,
description: resource.uri,
status: CoreToolCallStatus.Error,
isClientInitiated: true,
resultDisplay: `Error reading resource ${resource.uri}: ${getErrorMessage(error)}`,
confirmationDetails: undefined,
} as IndividualToolCallDisplay,
@@ -506,6 +508,7 @@ async function readLocalFiles(
name: readManyFilesTool.displayName,
description: invocation.getDescription(),
status: CoreToolCallStatus.Success,
isClientInitiated: true,
resultDisplay:
result.returnDisplay ||
`Successfully read: ${fileLabelsForDisplay.join(', ')}`,
@@ -565,6 +568,7 @@ async function readLocalFiles(
invocation?.getDescription() ??
'Error attempting to execute tool to read files',
status: CoreToolCallStatus.Error,
isClientInitiated: true,
resultDisplay: `Error reading files (${fileLabelsForDisplay.join(', ')}): ${getErrorMessage(error)}`,
confirmationDetails: undefined,
};

View File

@@ -305,6 +305,7 @@ export const useShellCommandProcessor = (
name: SHELL_COMMAND_NAME,
description: rawQuery,
status: CoreToolCallStatus.Executing,
isClientInitiated: true,
resultDisplay: '',
confirmationDetails: undefined,
};

View File

@@ -581,6 +581,7 @@ export const useSlashCommandProcessor = (
name: 'Expansion',
description: 'Command expansion needs shell access',
status: CoreToolCallStatus.AwaitingApproval,
isClientInitiated: true,
resultDisplay: undefined,
confirmationDetails,
};

View File

@@ -325,5 +325,33 @@ describe('toolMapping', () => {
const result = mapToDisplay(toolCall);
expect(result.tools[0].originalRequestName).toBe('original_tool');
});
it('propagates isClientInitiated from tool request', () => {
  // Builds a scheduled tool call whose request carries the given
  // isClientInitiated flag; everything else comes from the shared mocks.
  const makeScheduled = (
    callId: string,
    isClientInitiated: boolean,
  ): ScheduledToolCall => ({
    status: CoreToolCallStatus.Scheduled,
    request: { ...mockRequest, callId, isClientInitiated },
    tool: mockTool,
    invocation: mockInvocation,
  });
  // The display mapping must carry the flag through unchanged for both
  // client-initiated and model-initiated calls.
  const result = mapToDisplay([
    makeScheduled('call-client', true),
    makeScheduled('call-model', false),
  ]);
  expect(result.tools[0].isClientInitiated).toBe(true);
  expect(result.tools[1].isClientInitiated).toBe(false);
});
});
});

View File

@@ -101,6 +101,7 @@ export function mapToDisplay(
return {
...baseDisplayProperties,
status: call.status,
isClientInitiated: !!call.request.isClientInitiated,
resultDisplay,
confirmationDetails,
outputFile,

View File

@@ -335,7 +335,10 @@ describe('useGeminiStream', () => {
});
const mockLoadedSettings: LoadedSettings = {
merged: { preferredEditor: 'vscode' },
merged: {
preferredEditor: 'vscode',
ui: { errorVerbosity: 'full' },
},
user: { path: '/user/settings.json', settings: {} },
workspace: { path: '/workspace/.gemini/settings.json', settings: {} },
errors: [],
@@ -346,6 +349,7 @@ describe('useGeminiStream', () => {
const renderTestHook = (
initialToolCalls: TrackedToolCall[] = [],
geminiClient?: any,
loadedSettings: LoadedSettings = mockLoadedSettings,
) => {
const client = geminiClient || mockConfig.getGeminiClient();
let lastToolCalls = initialToolCalls;
@@ -360,7 +364,7 @@ describe('useGeminiStream', () => {
cmd: PartListUnion,
) => Promise<SlashCommandProcessorResult | false>,
shellModeActive: false,
loadedSettings: mockLoadedSettings,
loadedSettings,
toolCalls: initialToolCalls,
};
@@ -969,6 +973,93 @@ describe('useGeminiStream', () => {
// Streaming state should be Idle
expect(result.current.streamingState).toBe(StreamingState.Idle);
});
const infoTexts = mockAddItem.mock.calls.map(
([item]) => (item as { text?: string }).text ?? '',
);
expect(
infoTexts.some((text) =>
text.includes(
'Some internal tool attempts failed before this final error',
),
),
).toBe(false);
expect(
infoTexts.some((text) =>
text.includes('This request failed. Press F12 for diagnostics'),
),
).toBe(false);
});
// Verifies the low-verbosity messaging order around a STOP_EXECUTION tool
// error: the compact suppressed-error note must appear before the
// "Agent execution stopped" info line, which must appear before the
// generic low-verbosity failure hint.
it('should add a compact suppressed-error note before STOP_EXECUTION terminal info in low verbosity mode', async () => {
// A single model-initiated (isClientInitiated: false) tool call that
// failed with STOP_EXECUTION — the terminal error path under test.
const stopExecutionToolCalls: TrackedToolCall[] = [
{
request: {
callId: 'stop-call',
name: 'stopTool',
args: {},
isClientInitiated: false,
prompt_id: 'prompt-id-stop',
},
status: CoreToolCallStatus.Error,
response: {
callId: 'stop-call',
responseParts: [{ text: 'error occurred' }],
errorType: ToolErrorType.STOP_EXECUTION,
error: new Error('Stop reason from hook'),
resultDisplay: undefined,
},
responseSubmittedToGemini: false,
tool: {
displayName: 'stop tool',
},
invocation: {
getDescription: () => `Mock description`,
} as unknown as AnyToolInvocation,
} as unknown as TrackedCompletedToolCall,
];
// Same settings as the default fixture, but with errorVerbosity forced
// to 'low' so the suppression/notes code path is exercised.
const lowVerbositySettings = {
...mockLoadedSettings,
merged: {
...mockLoadedSettings.merged,
ui: { errorVerbosity: 'low' },
},
} as LoadedSettings;
const client = new MockedGeminiClientClass(mockConfig);
const { result } = renderTestHook([], client, lowVerbositySettings);
// Feed the completed (errored) tool call back through the completion
// callback captured from the scheduler.
await act(async () => {
if (capturedOnComplete) {
await capturedOnComplete(stopExecutionToolCalls);
}
});
// STOP_EXECUTION must terminate the turn: the call is marked submitted,
// no follow-up request is sent, and streaming returns to Idle.
await waitFor(() => {
expect(mockMarkToolsAsSubmitted).toHaveBeenCalledWith(['stop-call']);
expect(mockSendMessageStream).not.toHaveBeenCalled();
expect(result.current.streamingState).toBe(StreamingState.Idle);
});
// Collect the text of every history item added, then locate the three
// expected messages by substring.
const infoTexts = mockAddItem.mock.calls.map(
([item]) => (item as { text?: string }).text ?? '',
);
const noteIndex = infoTexts.findIndex((text) =>
text.includes(
'Some internal tool attempts failed before this final error',
),
);
const stopIndex = infoTexts.findIndex((text) =>
text.includes('Agent execution stopped: Stop reason from hook'),
);
const failureHintIndex = infoTexts.findIndex((text) =>
text.includes('This request failed. Press F12 for diagnostics'),
);
// All three messages are present, in the required order:
// suppressed note -> stop info -> failure hint.
expect(noteIndex).toBeGreaterThanOrEqual(0);
expect(stopIndex).toBeGreaterThanOrEqual(0);
expect(failureHintIndex).toBeGreaterThanOrEqual(0);
expect(noteIndex).toBeLessThan(stopIndex);
expect(stopIndex).toBeLessThan(failureHintIndex);
});
it('should group multiple cancelled tool call responses into a single history entry', async () => {

View File

@@ -107,6 +107,11 @@ enum StreamProcessingStatus {
Error,
}
// Shown at most once per turn in low verbosity mode, when model-initiated
// tool failures were suppressed from the transcript before a final error.
const SUPPRESSED_TOOL_ERRORS_NOTE =
'Some internal tool attempts failed before this final error. Press F12 for diagnostics, or set ui.errorVerbosity to full for full details.';
// Generic hint appended after a terminal failure in low verbosity mode,
// pointing users at diagnostics instead of a full error dump.
const LOW_VERBOSITY_FAILURE_NOTE =
'This request failed. Press F12 for diagnostics, or set ui.errorVerbosity to full for full details.';
function isShellToolData(data: unknown): data is ShellToolData {
if (typeof data !== 'object' || data === null) {
return false;
@@ -202,6 +207,10 @@ export const useGeminiStream = (
const [retryStatus, setRetryStatus] = useState<RetryAttemptPayload | null>(
null,
);
const isLowErrorVerbosity = settings.merged.ui?.errorVerbosity !== 'full';
const suppressedToolErrorCountRef = useRef(0);
const suppressedToolErrorNoteShownRef = useRef(false);
const lowVerbosityFailureNoteShownRef = useRef(false);
const abortControllerRef = useRef<AbortController | null>(null);
const turnCancelledRef = useRef(false);
const activeQueryIdRef = useRef<string | null>(null);
@@ -559,6 +568,51 @@ export const useGeminiStream = (
}
}, [isResponding]);
const maybeAddSuppressedToolErrorNote = useCallback(
  (userMessageTimestamp?: number) => {
    // Fires only in low verbosity mode, when at least one tool error was
    // suppressed this turn and the note has not already been shown.
    const shouldAnnounce =
      isLowErrorVerbosity &&
      suppressedToolErrorCountRef.current > 0 &&
      !suppressedToolErrorNoteShownRef.current;
    if (!shouldAnnounce) {
      return;
    }
    addItem(
      { type: MessageType.INFO, text: SUPPRESSED_TOOL_ERRORS_NOTE },
      userMessageTimestamp,
    );
    // Latch so the note appears at most once per turn.
    suppressedToolErrorNoteShownRef.current = true;
  },
  [addItem, isLowErrorVerbosity],
);
const maybeAddLowVerbosityFailureNote = useCallback(
  (userMessageTimestamp?: number) => {
    // Suppression only applies in low verbosity mode and outside debug
    // mode (isLowErrorVerbosity is checked first, preserving the original
    // short-circuit so getDebugMode() is not called in full mode).
    const suppressing = isLowErrorVerbosity && !config.getDebugMode();
    if (!suppressing || lowVerbosityFailureNoteShownRef.current) {
      return;
    }
    addItem(
      { type: MessageType.INFO, text: LOW_VERBOSITY_FAILURE_NOTE },
      userMessageTimestamp,
    );
    // Latch so the hint appears at most once per turn.
    lowVerbosityFailureNoteShownRef.current = true;
  },
  [addItem, config, isLowErrorVerbosity],
);
const cancelOngoingRequest = useCallback(() => {
if (
streamingState !== StreamingState.Responding &&
@@ -908,6 +962,7 @@ export const useGeminiStream = (
addItem(pendingHistoryItemRef.current, userMessageTimestamp);
setPendingHistoryItem(null);
}
maybeAddSuppressedToolErrorNote(userMessageTimestamp);
addItem(
{
type: MessageType.ERROR,
@@ -921,9 +976,18 @@ export const useGeminiStream = (
},
userMessageTimestamp,
);
maybeAddLowVerbosityFailureNote(userMessageTimestamp);
setThought(null); // Reset thought when there's an error
},
[addItem, pendingHistoryItemRef, setPendingHistoryItem, config, setThought],
[
addItem,
pendingHistoryItemRef,
setPendingHistoryItem,
config,
setThought,
maybeAddSuppressedToolErrorNote,
maybeAddLowVerbosityFailureNote,
],
);
const handleCitationEvent = useCallback(
@@ -1086,6 +1150,7 @@ export const useGeminiStream = (
},
userMessageTimestamp,
);
maybeAddLowVerbosityFailureNote(userMessageTimestamp);
if (contextCleared) {
addItem(
{
@@ -1097,7 +1162,13 @@ export const useGeminiStream = (
}
setIsResponding(false);
},
[addItem, pendingHistoryItemRef, setPendingHistoryItem, setIsResponding],
[
addItem,
pendingHistoryItemRef,
setPendingHistoryItem,
setIsResponding,
maybeAddLowVerbosityFailureNote,
],
);
const handleAgentExecutionBlockedEvent = useCallback(
@@ -1118,6 +1189,7 @@ export const useGeminiStream = (
},
userMessageTimestamp,
);
maybeAddLowVerbosityFailureNote(userMessageTimestamp);
if (contextCleared) {
addItem(
{
@@ -1128,7 +1200,12 @@ export const useGeminiStream = (
);
}
},
[addItem, pendingHistoryItemRef, setPendingHistoryItem],
[
addItem,
pendingHistoryItemRef,
setPendingHistoryItem,
maybeAddLowVerbosityFailureNote,
],
);
const processGeminiStreamEvents = useCallback(
@@ -1286,6 +1363,9 @@ export const useGeminiStream = (
if (!options?.isContinuation) {
setModelSwitchedFromQuotaError(false);
config.setQuotaErrorOccurred(false);
suppressedToolErrorCountRef.current = 0;
suppressedToolErrorNoteShownRef.current = false;
lowVerbosityFailureNoteShownRef.current = false;
}
abortControllerRef.current = new AbortController();
@@ -1402,6 +1482,7 @@ export const useGeminiStream = (
) {
// Error was handled by validation dialog, don't display again
} else if (!isNodeError(error) || error.name !== 'AbortError') {
maybeAddSuppressedToolErrorNote(userMessageTimestamp);
addItem(
{
type: MessageType.ERROR,
@@ -1415,6 +1496,7 @@ export const useGeminiStream = (
},
userMessageTimestamp,
);
maybeAddLowVerbosityFailureNote(userMessageTimestamp);
}
} finally {
if (activeQueryIdRef.current === queryId) {
@@ -1439,6 +1521,8 @@ export const useGeminiStream = (
startNewPrompt,
getPromptCount,
setThought,
maybeAddSuppressedToolErrorNote,
maybeAddLowVerbosityFailureNote,
],
);
@@ -1587,6 +1671,13 @@ export const useGeminiStream = (
(t) => !t.request.isClientInitiated,
);
if (isLowErrorVerbosity) {
// Low-mode suppression applies only to model-initiated tool failures.
suppressedToolErrorCountRef.current += geminiTools.filter(
(tc) => tc.status === CoreToolCallStatus.Error,
).length;
}
if (geminiTools.length === 0) {
return;
}
@@ -1597,10 +1688,12 @@ export const useGeminiStream = (
);
if (stopExecutionTool && stopExecutionTool.response.error) {
maybeAddSuppressedToolErrorNote();
addItem({
type: MessageType.INFO,
text: `Agent execution stopped: ${stopExecutionTool.response.error.message}`,
});
maybeAddLowVerbosityFailureNote();
setIsResponding(false);
const callIdsToMarkAsSubmitted = geminiTools.map(
@@ -1706,6 +1799,9 @@ export const useGeminiStream = (
registerBackgroundShell,
consumeUserHint,
config,
isLowErrorVerbosity,
maybeAddSuppressedToolErrorNote,
maybeAddLowVerbosityFailureNote,
],
);

View File

@@ -35,6 +35,7 @@ describe('useLoadingIndicator', () => {
initialShouldShowFocusHint: boolean = false,
initialRetryStatus: RetryAttemptPayload | null = null,
loadingPhrasesMode: LoadingPhrasesMode = 'all',
initialErrorVerbosity: 'low' | 'full' = 'full',
) => {
let hookResult: ReturnType<typeof useLoadingIndicator>;
function TestComponent({
@@ -42,17 +43,20 @@ describe('useLoadingIndicator', () => {
shouldShowFocusHint,
retryStatus,
mode,
errorVerbosity,
}: {
streamingState: StreamingState;
shouldShowFocusHint?: boolean;
retryStatus?: RetryAttemptPayload | null;
mode?: LoadingPhrasesMode;
errorVerbosity?: 'low' | 'full';
}) {
hookResult = useLoadingIndicator({
streamingState,
shouldShowFocusHint: !!shouldShowFocusHint,
retryStatus: retryStatus || null,
loadingPhrasesMode: mode,
errorVerbosity,
});
return null;
}
@@ -62,6 +66,7 @@ describe('useLoadingIndicator', () => {
shouldShowFocusHint={initialShouldShowFocusHint}
retryStatus={initialRetryStatus}
mode={loadingPhrasesMode}
errorVerbosity={initialErrorVerbosity}
/>,
);
return {
@@ -75,7 +80,15 @@ describe('useLoadingIndicator', () => {
shouldShowFocusHint?: boolean;
retryStatus?: RetryAttemptPayload | null;
mode?: LoadingPhrasesMode;
}) => rerender(<TestComponent mode={loadingPhrasesMode} {...newProps} />),
errorVerbosity?: 'low' | 'full';
}) =>
rerender(
<TestComponent
mode={loadingPhrasesMode}
errorVerbosity={initialErrorVerbosity}
{...newProps}
/>,
),
};
};
@@ -229,6 +242,46 @@ describe('useLoadingIndicator', () => {
expect(result.current.currentLoadingPhrase).toContain('Attempt 3/3');
});
it('should hide low-verbosity retry status for early retry attempts', () => {
  // Attempt 1 is below the low-verbosity hint threshold, so the generic
  // "still on it" phrase must not surface yet.
  const { result } = renderLoadingIndicatorHook(
    StreamingState.Responding,
    false,
    { model: 'gemini-pro', attempt: 1, maxAttempts: 5, delayMs: 1000 },
    'all',
    'low',
  );
  expect(result.current.currentLoadingPhrase).not.toBe(
    "This is taking a bit longer, we're still on it.",
  );
});
it('should show a generic retry phrase in low error verbosity mode for later retries', () => {
  // Attempt 2 reaches the low-verbosity threshold, so the generic phrase
  // replaces the detailed "Trying to reach <model> (Attempt x/y)" text.
  const { result } = renderLoadingIndicatorHook(
    StreamingState.Responding,
    false,
    { model: 'gemini-pro', attempt: 2, maxAttempts: 5, delayMs: 1000 },
    'all',
    'low',
  );
  expect(result.current.currentLoadingPhrase).toBe(
    "This is taking a bit longer, we're still on it.",
  );
});
it('should show no phrases when loadingPhrasesMode is "off"', () => {
const { result } = renderLoadingIndicatorHook(
StreamingState.Responding,

View File

@@ -14,12 +14,15 @@ import {
} from '@google/gemini-cli-core';
import type { LoadingPhrasesMode } from '../../config/settings.js';
// Retry attempt index at or above which the low-verbosity UI shows a
// generic "still working" phrase; earlier attempts stay silent.
const LOW_VERBOSITY_RETRY_HINT_ATTEMPT_THRESHOLD = 2;
export interface UseLoadingIndicatorProps {
streamingState: StreamingState;
shouldShowFocusHint: boolean;
retryStatus: RetryAttemptPayload | null;
loadingPhrasesMode?: LoadingPhrasesMode;
customWittyPhrases?: string[];
errorVerbosity?: 'low' | 'full';
}
export const useLoadingIndicator = ({
@@ -28,6 +31,7 @@ export const useLoadingIndicator = ({
retryStatus,
loadingPhrasesMode,
customWittyPhrases,
errorVerbosity = 'full',
}: UseLoadingIndicatorProps) => {
const [timerResetKey, setTimerResetKey] = useState(0);
const isTimerActive = streamingState === StreamingState.Responding;
@@ -70,7 +74,11 @@ export const useLoadingIndicator = ({
}, [streamingState, elapsedTimeFromTimer]);
const retryPhrase = retryStatus
? `Trying to reach ${getDisplayString(retryStatus.model)} (Attempt ${retryStatus.attempt + 1}/${retryStatus.maxAttempts})`
? errorVerbosity === 'low'
? retryStatus.attempt >= LOW_VERBOSITY_RETRY_HINT_ATTEMPT_THRESHOLD
? "This is taking a bit longer, we're still on it."
: null
: `Trying to reach ${getDisplayString(retryStatus.model)} (Attempt ${retryStatus.attempt + 1}/${retryStatus.maxAttempts})`
: null;
return {

View File

@@ -161,6 +161,67 @@ describe('useQuotaAndFallback', () => {
);
});
it('should auto-retry transient capacity failures in low verbosity mode', async () => {
const { result } = renderHook(() =>
useQuotaAndFallback({
config: mockConfig,
historyManager: mockHistoryManager,
userTier: UserTierId.FREE,
setModelSwitchedFromQuotaError: mockSetModelSwitchedFromQuotaError,
onShowAuthSelection: mockOnShowAuthSelection,
paidTier: null,
settings: mockSettings,
errorVerbosity: 'low',
}),
);
const handler = setFallbackHandlerSpy.mock
.calls[0][0] as FallbackModelHandler;
const intent = await handler(
'gemini-pro',
'gemini-flash',
new RetryableQuotaError('retryable quota', mockGoogleApiError, 5),
);
expect(intent).toBe('retry_once');
expect(result.current.proQuotaRequest).toBeNull();
expect(mockSetModelSwitchedFromQuotaError).not.toHaveBeenCalledWith(true);
expect(mockConfig.setQuotaErrorOccurred).not.toHaveBeenCalledWith(true);
});
// Low verbosity auto-retries only transient failures; a terminal quota
// error must still surface the interactive dialog.
it('should still prompt for terminal quota in low verbosity mode', async () => {
const { result } = renderHook(() =>
useQuotaAndFallback({
config: mockConfig,
historyManager: mockHistoryManager,
userTier: UserTierId.FREE,
setModelSwitchedFromQuotaError: mockSetModelSwitchedFromQuotaError,
onShowAuthSelection: mockOnShowAuthSelection,
paidTier: null,
settings: mockSettings,
errorVerbosity: 'low',
}),
);
const handler = setFallbackHandlerSpy.mock
.calls[0][0] as FallbackModelHandler;
// The handler's promise resolves only after the user answers the
// dialog, so invoke it inside act() and hold the promise.
let promise: Promise<FallbackIntent | null>;
act(() => {
promise = handler(
'gemini-pro',
'gemini-flash',
new TerminalQuotaError('pro quota', mockGoogleApiError),
);
});
// Terminal error: the interactive pro-quota dialog is requested.
expect(result.current.proQuotaRequest).not.toBeNull();
// Answer the dialog so the held handler promise can settle.
act(() => {
result.current.handleProQuotaChoice('retry_later');
});
await promise!;
});
describe('Interactive Fallback', () => {
it('should set an interactive request for a terminal quota error', async () => {
const { result } = renderHook(() =>

View File

@@ -42,6 +42,7 @@ interface UseQuotaAndFallbackArgs {
settings: LoadedSettings;
setModelSwitchedFromQuotaError: (value: boolean) => void;
onShowAuthSelection: () => void;
errorVerbosity?: 'low' | 'full';
}
export function useQuotaAndFallback({
@@ -52,6 +53,7 @@ export function useQuotaAndFallback({
settings,
setModelSwitchedFromQuotaError,
onShowAuthSelection,
errorVerbosity = 'full',
}: UseQuotaAndFallbackArgs) {
const [proQuotaRequest, setProQuotaRequest] =
useState<ProQuotaDialogRequest | null>(null);
@@ -165,6 +167,16 @@ export function useQuotaAndFallback({
message = messageLines.join('\n');
}
// In low verbosity mode, auto-retry transient capacity failures
// without interrupting with a dialog.
if (
errorVerbosity === 'low' &&
!isTerminalQuotaError &&
!isModelNotFoundError
) {
return 'retry_once';
}
setModelSwitchedFromQuotaError(true);
config.setQuotaErrorOccurred(true);
@@ -200,6 +212,7 @@ export function useQuotaAndFallback({
initialOverageStrategy,
setModelSwitchedFromQuotaError,
onShowAuthSelection,
errorVerbosity,
]);
// Set up validation handler for 403 VALIDATION_REQUIRED errors