2025-06-11 17:47:21 +00:00
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
2025-08-26 00:04:53 +02:00
import {
2026-02-13 11:27:20 -05:00
CoreToolCallStatus ,
2025-08-26 00:04:53 +02:00
AuthType ,
2026-01-05 15:25:54 -05:00
EditTool ,
2025-06-22 09:26:48 -05:00
GeminiClient ,
ToolConfirmationOutcome ,
2025-08-01 11:20:08 -04:00
ToolErrorType ,
2025-06-22 09:26:48 -05:00
ToolRegistry ,
2026-03-04 05:42:59 +05:30
type AnyDeclarativeTool ,
type AnyToolInvocation ,
type CompletedToolCall ,
type ContentGeneratorConfig ,
type ErroredToolCall ,
type MessageBus ,
2025-06-22 09:26:48 -05:00
} from '../index.js' ;
2025-09-11 22:42:10 +09:00
import { OutputFormat } from '../output/types.js' ;
2025-06-11 17:47:21 +00:00
import { logs } from '@opentelemetry/api-logs' ;
2026-02-21 13:33:25 -05:00
import type { Config , GeminiCLIExtension } from '../config/config.js' ;
2026-02-24 19:15:14 -05:00
import { ApprovalMode } from '../policy/types.js' ;
2025-06-12 19:36:51 -04:00
import {
2025-10-28 13:02:46 -07:00
logApiError ,
2025-06-12 19:36:51 -04:00
logApiRequest ,
2025-06-11 21:43:00 -04:00
logApiResponse ,
logCliConfiguration ,
logUserPrompt ,
2025-06-12 16:48:10 -04:00
logToolCall ,
2025-07-12 02:40:25 +05:30
logFlashFallback ,
2025-08-18 15:59:13 -04:00
logChatCompression ,
2025-08-24 19:11:41 -07:00
logMalformedJsonResponse ,
2026-03-04 14:27:47 -05:00
logInvalidChunk ,
2025-09-04 10:20:46 -07:00
logFileOperation ,
2025-09-08 14:44:56 -07:00
logRipgrepFallback ,
2025-09-11 08:40:46 -07:00
logToolOutputTruncated ,
2025-09-16 16:53:58 -04:00
logModelRouting ,
2025-09-18 14:01:36 -04:00
logExtensionEnable ,
2025-09-23 14:37:35 -04:00
logExtensionDisable ,
2025-09-18 14:01:36 -04:00
logExtensionInstallEvent ,
logExtensionUninstall ,
2025-10-08 15:42:33 -04:00
logAgentStart ,
logAgentFinish ,
2025-10-09 13:01:17 -04:00
logWebFetchFallbackAttempt ,
2026-03-11 14:55:48 -04:00
logNetworkRetryAttempt ,
2025-10-10 14:28:13 -07:00
logExtensionUpdateEvent ,
2025-12-21 16:18:42 -05:00
logHookCall ,
2025-06-11 21:43:00 -04:00
} from './loggers.js' ;
2025-08-07 19:58:18 -04:00
import { ToolCallDecision } from './tool-call-decision.js' ;
2025-06-22 09:26:48 -05:00
import {
2025-10-28 13:02:46 -07:00
EVENT_API_ERROR ,
2025-10-09 16:02:58 -07:00
EVENT_API_REQUEST ,
EVENT_API_RESPONSE ,
EVENT_CLI_CONFIG ,
EVENT_TOOL_CALL ,
EVENT_USER_PROMPT ,
EVENT_FLASH_FALLBACK ,
EVENT_MALFORMED_JSON_RESPONSE ,
EVENT_FILE_OPERATION ,
EVENT_RIPGREP_FALLBACK ,
EVENT_MODEL_ROUTING ,
EVENT_EXTENSION_ENABLE ,
EVENT_EXTENSION_DISABLE ,
EVENT_EXTENSION_INSTALL ,
EVENT_EXTENSION_UNINSTALL ,
EVENT_TOOL_OUTPUT_TRUNCATED ,
EVENT_AGENT_START ,
EVENT_AGENT_FINISH ,
EVENT_WEB_FETCH_FALLBACK_ATTEMPT ,
2026-03-04 14:27:47 -05:00
EVENT_INVALID_CHUNK ,
2026-03-11 14:55:48 -04:00
EVENT_NETWORK_RETRY_ATTEMPT ,
2025-10-28 13:02:46 -07:00
ApiErrorEvent ,
2025-06-22 09:26:48 -05:00
ApiRequestEvent ,
ApiResponseEvent ,
StartSessionEvent ,
ToolCallEvent ,
UserPromptEvent ,
2025-07-12 02:40:25 +05:30
FlashFallbackEvent ,
2025-09-08 14:44:56 -07:00
RipgrepFallbackEvent ,
2025-08-24 19:11:41 -07:00
MalformedJsonResponseEvent ,
2026-03-04 14:27:47 -05:00
InvalidChunkEvent ,
2025-08-18 15:59:13 -04:00
makeChatCompressionEvent ,
2025-09-04 10:20:46 -07:00
FileOperationEvent ,
2025-09-11 08:40:46 -07:00
ToolOutputTruncatedEvent ,
2025-09-16 16:53:58 -04:00
ModelRoutingEvent ,
2025-09-18 14:01:36 -04:00
ExtensionEnableEvent ,
2025-09-23 14:37:35 -04:00
ExtensionDisableEvent ,
2025-09-18 14:01:36 -04:00
ExtensionInstallEvent ,
ExtensionUninstallEvent ,
2025-10-08 15:42:33 -04:00
AgentStartEvent ,
AgentFinishEvent ,
2025-10-09 13:01:17 -04:00
WebFetchFallbackAttemptEvent ,
2026-03-11 14:55:48 -04:00
NetworkRetryAttemptEvent ,
2025-10-10 14:28:13 -07:00
ExtensionUpdateEvent ,
EVENT_EXTENSION_UPDATE ,
2025-12-21 16:18:42 -05:00
HookCallEvent ,
EVENT_HOOK_CALL ,
2026-02-17 12:32:30 -05:00
LlmRole ,
2025-06-22 09:26:48 -05:00
} from './types.js' ;
2026-02-24 13:03:36 -08:00
import { HookType } from '../hooks/types.js' ;
2025-06-11 17:47:21 +00:00
import * as metrics from './metrics.js' ;
2025-10-28 13:02:46 -07:00
import { FileOperation } from './metrics.js' ;
2025-06-11 17:47:21 +00:00
import * as sdk from './sdk.js' ;
2026-01-04 17:11:43 -05:00
import { createMockMessageBus } from '../test-utils/mock-message-bus.js' ;
2025-09-18 14:01:36 -04:00
import { vi , describe , beforeEach , it , expect , afterEach } from 'vitest' ;
2025-10-28 13:02:46 -07:00
import {
FinishReason ,
type CallableTool ,
type GenerateContentResponseUsageMetadata ,
2025-09-18 15:00:11 -07:00
} from '@google/genai' ;
import { DiscoveredMCPTool } from '../tools/mcp-tool.js' ;
2025-06-29 20:44:33 -04:00
import * as uiTelemetry from './uiTelemetry.js' ;
2025-08-18 15:59:13 -04:00
import { makeFakeConfig } from '../test-utils/config.js' ;
import { ClearcutLogger } from './clearcut-logger/clearcut-logger.js' ;
2025-08-25 14:20:17 -07:00
import { UserAccountManager } from '../utils/userAccountManager.js' ;
2025-10-10 12:06:08 -04:00
import { InstallationManager } from '../utils/installationManager.js' ;
2025-10-08 15:42:33 -04:00
import { AgentTerminateMode } from '../agents/types.js' ;
2025-06-11 17:47:21 +00:00
2026-01-12 23:59:22 +00:00
vi . mock ( 'systeminformation' , ( ) = > ( {
default : {
graphics : vi.fn ( ) . mockResolvedValue ( {
controllers : [ { model : 'Mock GPU' } ] ,
} ) ,
} ,
} ) ) ;
2025-06-11 20:15:44 +00:00
describe ( 'loggers' , ( ) = > {
2025-06-11 17:47:21 +00:00
const mockLogger = {
emit : vi.fn ( ) ,
} ;
2025-06-29 20:44:33 -04:00
const mockUiEvent = {
addEvent : vi.fn ( ) ,
} ;
2025-06-11 17:47:21 +00:00
beforeEach ( ( ) = > {
2025-08-20 19:52:22 +05:30
vi . clearAllMocks ( ) ;
2025-06-11 17:47:21 +00:00
vi . spyOn ( sdk , 'isTelemetrySdkInitialized' ) . mockReturnValue ( true ) ;
2025-12-02 21:27:37 -08:00
vi . spyOn ( sdk , 'bufferTelemetryEvent' ) . mockImplementation ( ( cb ) = > cb ( ) ) ;
2025-06-11 17:47:21 +00:00
vi . spyOn ( logs , 'getLogger' ) . mockReturnValue ( mockLogger ) ;
2025-06-29 20:44:33 -04:00
vi . spyOn ( uiTelemetry . uiTelemetryService , 'addEvent' ) . mockImplementation (
mockUiEvent . addEvent ,
) ;
2025-08-25 14:20:17 -07:00
vi . spyOn (
UserAccountManager . prototype ,
'getCachedGoogleAccount' ,
) . mockReturnValue ( 'test-user@example.com' ) ;
2025-10-10 12:06:08 -04:00
vi . spyOn (
InstallationManager . prototype ,
'getInstallationId' ,
) . mockReturnValue ( 'test-installation-id' ) ;
2025-06-11 17:47:21 +00:00
vi . useFakeTimers ( ) ;
vi . setSystemTime ( new Date ( '2025-01-01T00:00:00.000Z' ) ) ;
} ) ;
2025-08-18 15:59:13 -04:00
describe ( 'logChatCompression' , ( ) = > {
beforeEach ( ( ) = > {
vi . spyOn ( metrics , 'recordChatCompressionMetrics' ) ;
vi . spyOn ( ClearcutLogger . prototype , 'logChatCompressionEvent' ) ;
} ) ;
it ( 'logs the chat compression event to Clearcut' , ( ) = > {
const mockConfig = makeFakeConfig ( ) ;
const event = makeChatCompressionEvent ( {
tokens_before : 9001 ,
tokens_after : 9000 ,
} ) ;
logChatCompression ( mockConfig , event ) ;
expect (
ClearcutLogger . prototype . logChatCompressionEvent ,
) . toHaveBeenCalledWith ( event ) ;
} ) ;
it ( 'records the chat compression event to OTEL' , ( ) = > {
const mockConfig = makeFakeConfig ( ) ;
logChatCompression (
mockConfig ,
makeChatCompressionEvent ( {
tokens_before : 9001 ,
tokens_after : 9000 ,
} ) ,
) ;
expect ( metrics . recordChatCompressionMetrics ) . toHaveBeenCalledWith (
mockConfig ,
{ tokens_before : 9001 , tokens_after : 9000 } ,
) ;
} ) ;
} ) ;
2025-06-11 20:15:44 +00:00
describe ( 'logCliConfiguration' , ( ) = > {
2026-02-10 13:01:35 -08:00
it ( 'should log the cli configuration' , async ( ) = > {
2025-06-11 20:15:44 +00:00
const mockConfig = {
getSessionId : ( ) = > 'test-session-id' ,
getModel : ( ) = > 'test-model' ,
getEmbeddingModel : ( ) = > 'test-embedding-model' ,
getSandbox : ( ) = > true ,
getCoreTools : ( ) = > [ 'ls' , 'read-file' ] ,
getApprovalMode : ( ) = > 'default' ,
getContentGeneratorConfig : ( ) = > ( {
model : 'test-model' ,
apiKey : 'test-api-key' ,
2025-06-19 16:52:22 -07:00
authType : AuthType.USE_VERTEX_AI ,
2025-06-11 20:15:44 +00:00
} ) ,
2025-06-22 09:26:48 -05:00
getTelemetryEnabled : ( ) = > true ,
2025-06-23 17:19:40 -04:00
getUsageStatisticsEnabled : ( ) = > true ,
2025-06-15 00:47:32 -04:00
getTelemetryLogPromptsEnabled : ( ) = > true ,
2025-06-11 20:15:44 +00:00
getFileFilteringRespectGitIgnore : ( ) = > true ,
2025-06-22 09:26:48 -05:00
getFileFilteringAllowBuildArtifacts : ( ) = > false ,
2025-06-11 20:15:44 +00:00
getDebugMode : ( ) = > true ,
2025-11-04 07:51:18 -08:00
getMcpServers : ( ) = > {
throw new Error ( 'Should not call' ) ;
} ,
2025-06-11 20:15:44 +00:00
getQuestion : ( ) = > 'test-question' ,
2025-06-22 09:26:48 -05:00
getTargetDir : ( ) = > 'target-dir' ,
getProxy : ( ) = > 'http://test.proxy.com:8080' ,
2025-09-11 22:42:10 +09:00
getOutputFormat : ( ) = > OutputFormat . JSON ,
2025-10-29 15:21:32 -07:00
getExtensions : ( ) = >
[
{ name : 'ext-one' , id : 'id-one' } ,
{ name : 'ext-two' , id : 'id-two' } ,
] as GeminiCLIExtension [ ] ,
2025-11-04 07:51:18 -08:00
getMcpClientManager : ( ) = > ( {
getMcpServers : ( ) = > ( {
'test-server' : {
command : 'test-command' ,
} ,
} ) ,
} ) ,
2025-11-11 02:03:32 -08:00
isInteractive : ( ) = > false ,
2026-02-10 13:01:35 -08:00
getExperiments : ( ) = > undefined ,
getExperimentsAsync : async ( ) = > undefined ,
2025-06-11 20:15:44 +00:00
} as unknown as Config ;
2025-06-22 09:26:48 -05:00
const startSessionEvent = new StartSessionEvent ( mockConfig ) ;
logCliConfiguration ( mockConfig , startSessionEvent ) ;
2025-06-11 20:15:44 +00:00
2026-02-10 13:01:35 -08:00
await new Promise ( process . nextTick ) ;
2025-06-11 20:15:44 +00:00
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'CLI configuration loaded.' ,
attributes : {
'session.id' : 'test-session-id' ,
2025-08-25 14:20:17 -07:00
'user.email' : 'test-user@example.com' ,
2025-10-10 12:06:08 -04:00
'installation.id' : 'test-installation-id' ,
2025-06-22 09:26:48 -05:00
'event.name' : EVENT_CLI_CONFIG ,
2025-06-11 20:15:44 +00:00
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
2025-11-11 02:03:32 -08:00
interactive : false ,
2025-06-11 20:15:44 +00:00
model : 'test-model' ,
embedding_model : 'test-embedding-model' ,
sandbox_enabled : true ,
core_tools_enabled : 'ls,read-file' ,
approval_mode : 'default' ,
api_key_enabled : true ,
vertex_ai_enabled : true ,
log_user_prompts_enabled : true ,
file_filtering_respect_git_ignore : true ,
debug_mode : true ,
mcp_servers : 'test-server' ,
2025-08-21 00:25:42 -07:00
mcp_servers_count : 1 ,
2025-08-20 19:52:22 +05:30
mcp_tools : undefined ,
mcp_tools_count : undefined ,
2025-09-11 22:42:10 +09:00
output_format : 'json' ,
2025-10-29 15:21:32 -07:00
extension_ids : 'id-one,id-two' ,
extensions_count : 2 ,
extensions : 'ext-one,ext-two' ,
2025-10-29 12:28:28 -07:00
auth_type : 'vertex-ai' ,
2025-06-11 20:15:44 +00:00
} ,
} ) ;
} ) ;
} ) ;
2025-06-11 21:43:00 -04:00
describe ( 'logUserPrompt' , ( ) = > {
const mockConfig = {
getSessionId : ( ) = > 'test-session-id' ,
2025-06-22 09:26:48 -05:00
getTelemetryEnabled : ( ) = > true ,
2025-06-15 00:47:32 -04:00
getTelemetryLogPromptsEnabled : ( ) = > true ,
2025-06-23 17:19:40 -04:00
getUsageStatisticsEnabled : ( ) = > true ,
2025-11-11 02:03:32 -08:00
isInteractive : ( ) = > false ,
2026-02-10 13:01:35 -08:00
getExperiments : ( ) = > undefined ,
getExperimentsAsync : async ( ) = > undefined ,
2026-02-27 10:15:06 -08:00
getContentGeneratorConfig : ( ) = > undefined ,
2025-06-11 21:43:00 -04:00
} as unknown as Config ;
it ( 'should log a user prompt' , ( ) = > {
2025-07-11 22:17:46 +05:30
const event = new UserPromptEvent (
11 ,
'prompt-id-8' ,
AuthType . USE_VERTEX_AI ,
'test-prompt' ,
) ;
2025-06-11 21:43:00 -04:00
logUserPrompt ( mockConfig , event ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
2025-06-13 03:44:17 -04:00
body : 'User prompt. Length: 11.' ,
2025-06-11 21:43:00 -04:00
attributes : {
'session.id' : 'test-session-id' ,
2025-08-25 14:20:17 -07:00
'user.email' : 'test-user@example.com' ,
2025-10-10 12:06:08 -04:00
'installation.id' : 'test-installation-id' ,
2025-06-11 21:43:00 -04:00
'event.name' : EVENT_USER_PROMPT ,
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
2025-11-11 02:03:32 -08:00
interactive : false ,
2025-06-11 21:43:00 -04:00
prompt_length : 11 ,
prompt : 'test-prompt' ,
2025-08-27 19:17:32 -07:00
prompt_id : 'prompt-id-8' ,
auth_type : 'vertex-ai' ,
2025-06-11 21:43:00 -04:00
} ,
} ) ;
} ) ;
it ( 'should not log prompt if disabled' , ( ) = > {
const mockConfig = {
getSessionId : ( ) = > 'test-session-id' ,
2025-06-22 09:26:48 -05:00
getTelemetryEnabled : ( ) = > true ,
2025-06-15 00:47:32 -04:00
getTelemetryLogPromptsEnabled : ( ) = > false ,
2025-06-22 09:26:48 -05:00
getTargetDir : ( ) = > 'target-dir' ,
2025-06-23 17:19:40 -04:00
getUsageStatisticsEnabled : ( ) = > true ,
2025-11-11 02:03:32 -08:00
isInteractive : ( ) = > false ,
2026-02-10 13:01:35 -08:00
getExperiments : ( ) = > undefined ,
getExperimentsAsync : async ( ) = > undefined ,
2026-02-27 10:15:06 -08:00
getContentGeneratorConfig : ( ) = > undefined ,
2025-06-11 21:43:00 -04:00
} as unknown as Config ;
2025-07-11 22:17:46 +05:30
const event = new UserPromptEvent (
11 ,
2025-08-27 19:17:32 -07:00
'prompt-id-9' ,
2025-11-14 11:39:11 -05:00
AuthType . COMPUTE_ADC ,
2025-08-27 19:17:32 -07:00
'test-prompt' ,
2025-07-11 22:17:46 +05:30
) ;
2025-06-11 21:43:00 -04:00
logUserPrompt ( mockConfig , event ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
2025-06-13 03:44:17 -04:00
body : 'User prompt. Length: 11.' ,
2025-06-11 21:43:00 -04:00
attributes : {
'session.id' : 'test-session-id' ,
2025-08-25 14:20:17 -07:00
'user.email' : 'test-user@example.com' ,
2025-10-10 12:06:08 -04:00
'installation.id' : 'test-installation-id' ,
2025-06-11 21:43:00 -04:00
'event.name' : EVENT_USER_PROMPT ,
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
2025-11-11 02:03:32 -08:00
interactive : false ,
2025-06-11 21:43:00 -04:00
prompt_length : 11 ,
2025-08-27 19:17:32 -07:00
prompt_id : 'prompt-id-9' ,
2025-11-14 11:39:11 -05:00
auth_type : 'compute-default-credentials' ,
2025-06-11 21:43:00 -04:00
} ,
} ) ;
} ) ;
} ) ;
2025-06-11 20:15:44 +00:00
describe ( 'logApiResponse' , ( ) = > {
const mockConfig = {
getSessionId : ( ) = > 'test-session-id' ,
2025-06-22 09:26:48 -05:00
getTargetDir : ( ) = > 'target-dir' ,
2025-06-23 17:19:40 -04:00
getUsageStatisticsEnabled : ( ) = > true ,
2025-06-22 09:26:48 -05:00
getTelemetryEnabled : ( ) = > true ,
getTelemetryLogPromptsEnabled : ( ) = > true ,
2025-11-11 02:03:32 -08:00
isInteractive : ( ) = > false ,
2026-02-10 13:01:35 -08:00
getExperiments : ( ) = > undefined ,
getExperimentsAsync : async ( ) = > undefined ,
2026-02-27 10:15:06 -08:00
getContentGeneratorConfig : ( ) = > undefined ,
} as unknown as Config ;
2025-06-11 20:15:44 +00:00
const mockMetrics = {
recordApiResponseMetrics : vi.fn ( ) ,
recordTokenUsageMetrics : vi.fn ( ) ,
2025-06-11 17:47:21 +00:00
} ;
2025-06-11 20:15:44 +00:00
beforeEach ( ( ) = > {
vi . spyOn ( metrics , 'recordApiResponseMetrics' ) . mockImplementation (
mockMetrics . recordApiResponseMetrics ,
) ;
vi . spyOn ( metrics , 'recordTokenUsageMetrics' ) . mockImplementation (
mockMetrics . recordTokenUsageMetrics ,
) ;
} ) ;
2025-06-11 17:47:21 +00:00
2025-06-11 20:15:44 +00:00
it ( 'should log an API response with all fields' , ( ) = > {
2025-06-22 09:26:48 -05:00
const usageData : GenerateContentResponseUsageMetadata = {
promptTokenCount : 17 ,
candidatesTokenCount : 50 ,
cachedContentTokenCount : 10 ,
thoughtsTokenCount : 5 ,
toolUsePromptTokenCount : 2 ,
2025-06-11 20:15:44 +00:00
} ;
2025-06-22 09:26:48 -05:00
const event = new ApiResponseEvent (
'test-model' ,
100 ,
2025-10-28 13:02:46 -07:00
{
prompt_id : 'prompt-id-1' ,
contents : [
{
role : 'user' ,
parts : [ { text : 'Hello' } ] ,
} ,
] ,
generate_content_config : {
temperature : 1 ,
topP : 2 ,
topK : 3 ,
responseMimeType : 'text/plain' ,
candidateCount : 1 ,
seed : 678 ,
frequencyPenalty : 10 ,
maxOutputTokens : 8000 ,
presencePenalty : 6 ,
stopSequences : [ 'stop' , 'please stop' ] ,
systemInstruction : {
role : 'model' ,
parts : [ { text : 'be nice' } ] ,
} ,
} ,
server : {
address : 'foo.com' ,
port : 8080 ,
} ,
} ,
{
response_id : '' ,
candidates : [
{
content : {
role : 'model' ,
parts : [ { text : 'candidate 1' } ] ,
} ,
finishReason : FinishReason.STOP ,
} ,
] ,
} ,
2025-07-11 22:17:46 +05:30
AuthType . LOGIN_WITH_GOOGLE ,
2025-06-22 09:26:48 -05:00
usageData ,
'test-response' ,
) ;
2025-06-11 20:15:44 +00:00
logApiResponse ( mockConfig , event ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'API response from test-model. Status: 200. Duration: 100ms.' ,
2025-10-28 13:02:46 -07:00
attributes : expect.objectContaining ( {
2025-06-11 20:15:44 +00:00
'event.name' : EVENT_API_RESPONSE ,
2025-07-10 00:19:30 +05:30
prompt_id : 'prompt-id-1' ,
2025-11-26 20:46:40 -08:00
finish_reasons : [ 'stop' ] ,
2025-10-28 13:02:46 -07:00
} ) ,
} ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'GenAI operation details from test-model. Status: 200. Duration: 100ms.' ,
attributes : expect.objectContaining ( {
'event.name' : 'gen_ai.client.inference.operation.details' ,
'gen_ai.request.model' : 'test-model' ,
'gen_ai.request.temperature' : 1 ,
'gen_ai.request.top_p' : 2 ,
'gen_ai.request.top_k' : 3 ,
'gen_ai.input.messages' :
'[{"role":"user","parts":[{"type":"text","content":"Hello"}]}]' ,
'gen_ai.output.messages' :
'[{"finish_reason":"stop","role":"system","parts":[{"type":"text","content":"candidate 1"}]}]' ,
'gen_ai.response.finish_reasons' : [ 'stop' ] ,
'gen_ai.response.model' : 'test-model' ,
'gen_ai.usage.input_tokens' : 17 ,
'gen_ai.usage.output_tokens' : 50 ,
'gen_ai.operation.name' : 'generate_content' ,
'gen_ai.output.type' : 'text' ,
'gen_ai.request.choice.count' : 1 ,
'gen_ai.request.seed' : 678 ,
'gen_ai.request.frequency_penalty' : 10 ,
'gen_ai.request.presence_penalty' : 6 ,
'gen_ai.request.max_tokens' : 8000 ,
'server.address' : 'foo.com' ,
'server.port' : 8080 ,
'gen_ai.request.stop_sequences' : [ 'stop' , 'please stop' ] ,
'gen_ai.system_instructions' : '[{"type":"text","content":"be nice"}]' ,
} ) ,
2025-06-11 20:15:44 +00:00
} ) ;
expect ( mockMetrics . recordApiResponseMetrics ) . toHaveBeenCalledWith (
mockConfig ,
100 ,
2025-10-01 16:24:55 -04:00
{
model : 'test-model' ,
status_code : 200 ,
genAiAttributes : {
'gen_ai.operation.name' : 'generate_content' ,
'gen_ai.provider.name' : 'gcp.vertex_ai' ,
'gen_ai.request.model' : 'test-model' ,
'gen_ai.response.model' : 'test-model' ,
} ,
} ,
) ;
// Verify token usage calls for all token types
expect ( mockMetrics . recordTokenUsageMetrics ) . toHaveBeenCalledWith (
mockConfig ,
17 ,
{
model : 'test-model' ,
type : 'input' ,
genAiAttributes : {
'gen_ai.operation.name' : 'generate_content' ,
'gen_ai.provider.name' : 'gcp.vertex_ai' ,
'gen_ai.request.model' : 'test-model' ,
'gen_ai.response.model' : 'test-model' ,
} ,
} ,
2025-06-11 20:15:44 +00:00
) ;
expect ( mockMetrics . recordTokenUsageMetrics ) . toHaveBeenCalledWith (
mockConfig ,
50 ,
2025-10-01 16:24:55 -04:00
{
model : 'test-model' ,
type : 'output' ,
genAiAttributes : {
'gen_ai.operation.name' : 'generate_content' ,
'gen_ai.provider.name' : 'gcp.vertex_ai' ,
'gen_ai.request.model' : 'test-model' ,
'gen_ai.response.model' : 'test-model' ,
} ,
} ,
2025-06-11 20:15:44 +00:00
) ;
2025-06-29 20:44:33 -04:00
expect ( mockUiEvent . addEvent ) . toHaveBeenCalledWith ( {
. . . event ,
'event.name' : EVENT_API_RESPONSE ,
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
} ) ;
2025-06-11 17:47:21 +00:00
} ) ;
2026-02-17 12:32:30 -05:00
it ( 'should log an API response with a role' , ( ) = > {
const event = new ApiResponseEvent (
'test-model' ,
100 ,
{ prompt_id : 'prompt-id-role' , contents : [ ] } ,
{ candidates : [ ] } ,
AuthType . LOGIN_WITH_GOOGLE ,
{ } ,
'test-response' ,
LlmRole . SUBAGENT ,
) ;
logApiResponse ( mockConfig , event ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'API response from test-model. Status: 200. Duration: 100ms.' ,
attributes : expect.objectContaining ( {
'event.name' : EVENT_API_RESPONSE ,
prompt_id : 'prompt-id-role' ,
role : 'subagent' ,
} ) ,
} ) ;
} ) ;
2025-10-28 13:02:46 -07:00
} ) ;
describe ( 'logApiError' , ( ) = > {
const mockConfig = {
getSessionId : ( ) = > 'test-session-id' ,
getTargetDir : ( ) = > 'target-dir' ,
getUsageStatisticsEnabled : ( ) = > true ,
getTelemetryEnabled : ( ) = > true ,
getTelemetryLogPromptsEnabled : ( ) = > true ,
2025-11-11 02:03:32 -08:00
isInteractive : ( ) = > false ,
2026-02-10 13:01:35 -08:00
getExperiments : ( ) = > undefined ,
getExperimentsAsync : async ( ) = > undefined ,
2026-02-27 10:15:06 -08:00
getContentGeneratorConfig : ( ) = > undefined ,
} as unknown as Config ;
2025-10-28 13:02:46 -07:00
const mockMetrics = {
recordApiResponseMetrics : vi.fn ( ) ,
recordApiErrorMetrics : vi.fn ( ) ,
recordTokenUsageMetrics : vi.fn ( ) ,
} ;
beforeEach ( ( ) = > {
vi . spyOn ( metrics , 'recordApiResponseMetrics' ) . mockImplementation (
mockMetrics . recordApiResponseMetrics ,
) ;
vi . spyOn ( metrics , 'recordApiErrorMetrics' ) . mockImplementation (
mockMetrics . recordApiErrorMetrics ,
) ;
} ) ;
it ( 'should log an API error with all fields' , ( ) = > {
const event = new ApiErrorEvent (
'test-model' ,
'UNAVAILABLE. {"error":{"code":503,"message":"The model is overloaded. Please try again later.","status":"UNAVAILABLE"}}' ,
100 ,
{
prompt_id : 'prompt-id-1' ,
contents : [
{
role : 'user' ,
parts : [ { text : 'Hello' } ] ,
} ,
] ,
generate_content_config : {
temperature : 1 ,
topP : 2 ,
topK : 3 ,
responseMimeType : 'text/plain' ,
candidateCount : 1 ,
seed : 678 ,
frequencyPenalty : 10 ,
maxOutputTokens : 8000 ,
presencePenalty : 6 ,
stopSequences : [ 'stop' , 'please stop' ] ,
systemInstruction : {
role : 'model' ,
parts : [ { text : 'be nice' } ] ,
} ,
} ,
server : {
address : 'foo.com' ,
port : 8080 ,
} ,
} ,
AuthType . LOGIN_WITH_GOOGLE ,
'ApiError' ,
503 ,
) ;
logApiError ( mockConfig , event ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'API error for test-model. Error: UNAVAILABLE. {"error":{"code":503,"message":"The model is overloaded. Please try again later.","status":"UNAVAILABLE"}}. Duration: 100ms.' ,
attributes : expect.objectContaining ( {
'event.name' : EVENT_API_ERROR ,
prompt_id : 'prompt-id-1' ,
} ) ,
} ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'GenAI operation error details from test-model. Error: UNAVAILABLE. {"error":{"code":503,"message":"The model is overloaded. Please try again later.","status":"UNAVAILABLE"}}. Duration: 100ms.' ,
attributes : expect.objectContaining ( {
'event.name' : 'gen_ai.client.inference.operation.details' ,
'gen_ai.request.model' : 'test-model' ,
'gen_ai.request.temperature' : 1 ,
'gen_ai.request.top_p' : 2 ,
'gen_ai.request.top_k' : 3 ,
'gen_ai.input.messages' :
'[{"role":"user","parts":[{"type":"text","content":"Hello"}]}]' ,
'gen_ai.operation.name' : 'generate_content' ,
'gen_ai.output.type' : 'text' ,
'gen_ai.request.choice.count' : 1 ,
'gen_ai.request.seed' : 678 ,
'gen_ai.request.frequency_penalty' : 10 ,
'gen_ai.request.presence_penalty' : 6 ,
'gen_ai.request.max_tokens' : 8000 ,
'server.address' : 'foo.com' ,
'server.port' : 8080 ,
'gen_ai.request.stop_sequences' : [ 'stop' , 'please stop' ] ,
'gen_ai.system_instructions' : '[{"type":"text","content":"be nice"}]' ,
} ) ,
} ) ;
expect ( mockMetrics . recordApiErrorMetrics ) . toHaveBeenCalledWith (
mockConfig ,
100 ,
{
model : 'test-model' ,
status_code : 503 ,
error_type : 'ApiError' ,
} ,
) ;
expect ( mockMetrics . recordApiResponseMetrics ) . toHaveBeenCalledWith (
mockConfig ,
100 ,
{
model : 'test-model' ,
status_code : 503 ,
genAiAttributes : {
'gen_ai.operation.name' : 'generate_content' ,
'gen_ai.provider.name' : 'gcp.vertex_ai' ,
'gen_ai.request.model' : 'test-model' ,
'gen_ai.response.model' : 'test-model' ,
'error.type' : 'ApiError' ,
} ,
} ,
) ;
expect ( mockUiEvent . addEvent ) . toHaveBeenCalledWith ( {
. . . event ,
'event.name' : EVENT_API_ERROR ,
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
} ) ;
} ) ;
2026-02-17 12:32:30 -05:00
it ( 'should log an API error with a role' , ( ) = > {
const event = new ApiErrorEvent (
'test-model' ,
'error' ,
100 ,
{ prompt_id : 'prompt-id-role' , contents : [ ] } ,
AuthType . LOGIN_WITH_GOOGLE ,
'ApiError' ,
503 ,
LlmRole . SUBAGENT ,
) ;
logApiError ( mockConfig , event ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'API error for test-model. Error: error. Duration: 100ms.' ,
attributes : expect.objectContaining ( {
'event.name' : EVENT_API_ERROR ,
prompt_id : 'prompt-id-role' ,
role : 'subagent' ,
} ) ,
} ) ;
} ) ;
2025-06-11 17:47:21 +00:00
} ) ;
2025-06-12 16:48:10 -04:00
2025-06-12 19:36:51 -04:00
describe ( 'logApiRequest' , ( ) = > {
const mockConfig = {
getSessionId : ( ) = > 'test-session-id' ,
2025-06-22 09:26:48 -05:00
getTargetDir : ( ) = > 'target-dir' ,
2025-06-23 17:19:40 -04:00
getUsageStatisticsEnabled : ( ) = > true ,
2025-06-22 09:26:48 -05:00
getTelemetryEnabled : ( ) = > true ,
getTelemetryLogPromptsEnabled : ( ) = > true ,
2025-11-11 02:03:32 -08:00
isInteractive : ( ) = > false ,
2026-02-10 13:01:35 -08:00
getExperiments : ( ) = > undefined ,
getExperimentsAsync : async ( ) = > undefined ,
2025-11-30 09:05:24 +05:30
getContentGeneratorConfig : ( ) = > ( {
authType : AuthType.LOGIN_WITH_GOOGLE ,
} ) ,
2025-06-12 19:36:51 -04:00
} as Config ;
it ( 'should log an API request with request_text' , ( ) = > {
2025-07-10 00:19:30 +05:30
const event = new ApiRequestEvent (
'test-model' ,
2025-11-30 09:05:24 +05:30
{
prompt_id : 'prompt-id-7' ,
contents : [ ] ,
} ,
2025-07-10 00:19:30 +05:30
'This is a test request' ,
) ;
2025-06-12 19:36:51 -04:00
logApiRequest ( mockConfig , event ) ;
2025-11-30 09:05:24 +05:30
expect ( mockLogger . emit ) . toHaveBeenNthCalledWith ( 1 , {
2025-06-13 03:44:17 -04:00
body : 'API request to test-model.' ,
2025-11-30 09:05:24 +05:30
attributes : expect.objectContaining ( {
2025-06-12 19:36:51 -04:00
'event.name' : EVENT_API_REQUEST ,
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
2025-11-11 02:03:32 -08:00
interactive : false ,
2025-06-12 19:36:51 -04:00
model : 'test-model' ,
request_text : 'This is a test request' ,
2025-07-10 00:19:30 +05:30
prompt_id : 'prompt-id-7' ,
2025-11-30 09:05:24 +05:30
} ) ,
} ) ;
expect ( mockLogger . emit ) . toHaveBeenNthCalledWith ( 2 , {
body : 'GenAI operation request details from test-model.' ,
attributes : expect.objectContaining ( {
'event.name' : 'gen_ai.client.inference.operation.details' ,
'gen_ai.request.model' : 'test-model' ,
'gen_ai.provider.name' : 'gcp.vertex_ai' ,
} ) ,
2025-06-12 19:36:51 -04:00
} ) ;
} ) ;
it ( 'should log an API request without request_text' , ( ) = > {
2025-11-30 09:05:24 +05:30
const event = new ApiRequestEvent ( 'test-model' , {
prompt_id : 'prompt-id-6' ,
contents : [ ] ,
} ) ;
2025-06-12 19:36:51 -04:00
logApiRequest ( mockConfig , event ) ;
2025-11-30 09:05:24 +05:30
expect ( mockLogger . emit ) . toHaveBeenNthCalledWith ( 1 , {
2025-06-13 03:44:17 -04:00
body : 'API request to test-model.' ,
2025-11-30 09:05:24 +05:30
attributes : expect.objectContaining ( {
2025-06-12 19:36:51 -04:00
'event.name' : EVENT_API_REQUEST ,
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
2025-11-11 02:03:32 -08:00
interactive : false ,
2025-06-12 19:36:51 -04:00
model : 'test-model' ,
2025-07-10 00:19:30 +05:30
prompt_id : 'prompt-id-6' ,
2025-11-30 09:05:24 +05:30
} ) ,
} ) ;
expect ( mockLogger . emit ) . toHaveBeenNthCalledWith ( 2 , {
body : 'GenAI operation request details from test-model.' ,
attributes : expect.objectContaining ( {
'event.name' : 'gen_ai.client.inference.operation.details' ,
'gen_ai.request.model' : 'test-model' ,
'gen_ai.provider.name' : 'gcp.vertex_ai' ,
} ) ,
} ) ;
} ) ;
it ( 'should log an API request with full semantic details when logPrompts is enabled' , ( ) = > {
const mockConfigWithPrompts = {
getSessionId : ( ) = > 'test-session-id' ,
getTargetDir : ( ) = > 'target-dir' ,
getUsageStatisticsEnabled : ( ) = > true ,
getTelemetryEnabled : ( ) = > true ,
getTelemetryLogPromptsEnabled : ( ) = > true , // Enabled
isInteractive : ( ) = > false ,
2026-02-10 13:01:35 -08:00
getExperiments : ( ) = > undefined ,
getExperimentsAsync : async ( ) = > undefined ,
2025-11-30 09:05:24 +05:30
getContentGeneratorConfig : ( ) = > ( {
authType : AuthType.USE_GEMINI ,
} ) ,
} as Config ;
const promptDetails = {
prompt_id : 'prompt-id-semantic-1' ,
contents : [
{
role : 'user' ,
parts : [ { text : 'Semantic request test' } ] ,
} ,
] ,
generate_content_config : {
temperature : 0.5 ,
topP : 0.8 ,
topK : 10 ,
responseMimeType : 'application/json' ,
candidateCount : 1 ,
stopSequences : [ 'end' ] ,
systemInstruction : {
role : 'model' ,
parts : [ { text : 'be helpful' } ] ,
} ,
} ,
server : {
address : 'semantic-api.example.com' ,
port : 8080 ,
2025-06-12 19:36:51 -04:00
} ,
2025-11-30 09:05:24 +05:30
} ;
const event = new ApiRequestEvent (
'test-model' ,
promptDetails ,
'Full semantic request' ,
) ;
logApiRequest ( mockConfigWithPrompts , event ) ;
// Expect two calls to emit: one for the regular log, one for the semantic log
expect ( mockLogger . emit ) . toHaveBeenCalledTimes ( 2 ) ;
// Verify the first (original) log record
expect ( mockLogger . emit ) . toHaveBeenNthCalledWith ( 1 , {
body : 'API request to test-model.' ,
attributes : expect.objectContaining ( {
'event.name' : EVENT_API_REQUEST ,
prompt_id : 'prompt-id-semantic-1' ,
} ) ,
} ) ;
// Verify the second (semantic) log record
expect ( mockLogger . emit ) . toHaveBeenNthCalledWith ( 2 , {
body : 'GenAI operation request details from test-model.' ,
attributes : expect.objectContaining ( {
'event.name' : 'gen_ai.client.inference.operation.details' ,
'gen_ai.request.model' : 'test-model' ,
'gen_ai.request.temperature' : 0.5 ,
'gen_ai.request.top_p' : 0.8 ,
'gen_ai.request.top_k' : 10 ,
'gen_ai.input.messages' : JSON . stringify ( [
{
role : 'user' ,
parts : [ { type : 'text' , content : 'Semantic request test' } ] ,
} ,
] ) ,
'server.address' : 'semantic-api.example.com' ,
'server.port' : 8080 ,
'gen_ai.operation.name' : 'generate_content' ,
'gen_ai.provider.name' : 'gcp.gen_ai' ,
'gen_ai.output.type' : 'json' ,
'gen_ai.request.stop_sequences' : [ 'end' ] ,
'gen_ai.system_instructions' : JSON . stringify ( [
{ type : 'text' , content : 'be helpful' } ,
] ) ,
} ) ,
} ) ;
} ) ;
it ( 'should log an API request with semantic details, but without prompts when logPrompts is disabled' , ( ) = > {
const mockConfigWithoutPrompts = {
getSessionId : ( ) = > 'test-session-id' ,
getTargetDir : ( ) = > 'target-dir' ,
getUsageStatisticsEnabled : ( ) = > true ,
getTelemetryEnabled : ( ) = > true ,
getTelemetryLogPromptsEnabled : ( ) = > false , // Disabled
isInteractive : ( ) = > false ,
2026-02-10 13:01:35 -08:00
getExperiments : ( ) = > undefined ,
getExperimentsAsync : async ( ) = > undefined ,
2025-11-30 09:05:24 +05:30
getContentGeneratorConfig : ( ) = > ( {
authType : AuthType.USE_VERTEX_AI ,
} ) ,
} as Config ;
const promptDetails = {
prompt_id : 'prompt-id-semantic-2' ,
contents : [
{
role : 'user' ,
parts : [ { text : 'This prompt should be hidden' } ] ,
} ,
] ,
generate_content_config : { } ,
model : 'gemini-1.0-pro' ,
} ;
const event = new ApiRequestEvent (
'gemini-1.0-pro' ,
promptDetails ,
'Request with hidden prompt' ,
) ;
logApiRequest ( mockConfigWithoutPrompts , event ) ;
// Expect two calls to emit
expect ( mockLogger . emit ) . toHaveBeenCalledTimes ( 2 ) ;
// Get the arguments of the second (semantic) log call
const semanticLogCall = mockLogger . emit . mock . calls [ 1 ] [ 0 ] ;
// Assert on the body
expect ( semanticLogCall . body ) . toBe (
'GenAI operation request details from gemini-1.0-pro.' ,
) ;
// Assert on specific attributes
const attributes = semanticLogCall . attributes ;
expect ( attributes [ 'event.name' ] ) . toBe (
'gen_ai.client.inference.operation.details' ,
) ;
expect ( attributes [ 'gen_ai.request.model' ] ) . toBe ( 'gemini-1.0-pro' ) ;
expect ( attributes [ 'gen_ai.provider.name' ] ) . toBe ( 'gcp.vertex_ai' ) ;
// Ensure prompt messages are NOT included
expect ( attributes [ 'gen_ai.input.messages' ] ) . toBeUndefined ( ) ;
} ) ;
it ( 'should correctly derive model from prompt details if available in semantic log' , ( ) = > {
const mockConfig = {
getSessionId : ( ) = > 'test-session-id' ,
getTelemetryEnabled : ( ) = > true ,
getTelemetryLogPromptsEnabled : ( ) = > true ,
isInteractive : ( ) = > false ,
2026-02-10 13:01:35 -08:00
getExperiments : ( ) = > undefined ,
getExperimentsAsync : async ( ) = > undefined ,
2025-11-30 09:05:24 +05:30
getUsageStatisticsEnabled : ( ) = > true ,
getContentGeneratorConfig : ( ) = > ( {
authType : AuthType.USE_GEMINI ,
} ) ,
} as Config ;
const promptDetails = {
prompt_id : 'prompt-id-semantic-3' ,
contents : [ ] ,
model : 'my-custom-model' ,
} ;
const event = new ApiRequestEvent (
'my-custom-model' ,
promptDetails ,
'Request with custom model' ,
) ;
logApiRequest ( mockConfig , event ) ;
// Verify the second (semantic) log record
expect ( mockLogger . emit ) . toHaveBeenNthCalledWith ( 2 , {
body : 'GenAI operation request details from my-custom-model.' ,
attributes : expect.objectContaining ( {
'event.name' : 'gen_ai.client.inference.operation.details' ,
'gen_ai.request.model' : 'my-custom-model' ,
} ) ,
2025-06-12 19:36:51 -04:00
} ) ;
} ) ;
2026-02-17 12:32:30 -05:00
it ( 'should log an API request with a role' , ( ) = > {
const event = new ApiRequestEvent (
'test-model' ,
{ prompt_id : 'prompt-id-role' , contents : [ ] } ,
'request text' ,
LlmRole . SUBAGENT ,
) ;
logApiRequest ( mockConfig , event ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'API request to test-model.' ,
attributes : expect.objectContaining ( {
'event.name' : EVENT_API_REQUEST ,
prompt_id : 'prompt-id-role' ,
role : 'subagent' ,
} ) ,
} ) ;
} ) ;
2025-06-12 19:36:51 -04:00
} ) ;
2025-07-12 02:40:25 +05:30
describe ( 'logFlashFallback' , ( ) = > {
const mockConfig = {
getSessionId : ( ) = > 'test-session-id' ,
getUsageStatisticsEnabled : ( ) = > true ,
2025-11-11 02:03:32 -08:00
isInteractive : ( ) = > false ,
2026-02-10 13:01:35 -08:00
getExperiments : ( ) = > undefined ,
getExperimentsAsync : async ( ) = > undefined ,
2026-02-27 10:15:06 -08:00
getContentGeneratorConfig : ( ) = > undefined ,
2025-07-12 02:40:25 +05:30
} as unknown as Config ;
it ( 'should log flash fallback event' , ( ) = > {
const event = new FlashFallbackEvent ( AuthType . USE_VERTEX_AI ) ;
logFlashFallback ( mockConfig , event ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'Switching to flash as Fallback.' ,
attributes : {
'session.id' : 'test-session-id' ,
2025-08-25 14:20:17 -07:00
'user.email' : 'test-user@example.com' ,
2025-10-10 12:06:08 -04:00
'installation.id' : 'test-installation-id' ,
2025-07-12 02:40:25 +05:30
'event.name' : EVENT_FLASH_FALLBACK ,
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
2025-11-11 02:03:32 -08:00
interactive : false ,
2025-07-12 02:40:25 +05:30
auth_type : 'vertex-ai' ,
} ,
} ) ;
} ) ;
} ) ;
2025-09-08 14:44:56 -07:00
describe ( 'logRipgrepFallback' , ( ) = > {
const mockConfig = {
getSessionId : ( ) = > 'test-session-id' ,
getUsageStatisticsEnabled : ( ) = > true ,
2025-11-11 02:03:32 -08:00
isInteractive : ( ) = > false ,
2026-02-10 13:01:35 -08:00
getExperiments : ( ) = > undefined ,
getExperimentsAsync : async ( ) = > undefined ,
2026-02-27 10:15:06 -08:00
getContentGeneratorConfig : ( ) = > undefined ,
2025-09-08 14:44:56 -07:00
} as unknown as Config ;
beforeEach ( ( ) = > {
vi . spyOn ( ClearcutLogger . prototype , 'logRipgrepFallbackEvent' ) ;
} ) ;
it ( 'should log ripgrep fallback event' , ( ) = > {
const event = new RipgrepFallbackEvent ( ) ;
logRipgrepFallback ( mockConfig , event ) ;
expect (
ClearcutLogger . prototype . logRipgrepFallbackEvent ,
) . toHaveBeenCalled ( ) ;
const emittedEvent = mockLogger . emit . mock . calls [ 0 ] [ 0 ] ;
expect ( emittedEvent . body ) . toBe ( 'Switching to grep as fallback.' ) ;
expect ( emittedEvent . attributes ) . toEqual (
expect . objectContaining ( {
'session.id' : 'test-session-id' ,
'user.email' : 'test-user@example.com' ,
2025-10-10 12:06:08 -04:00
'installation.id' : 'test-installation-id' ,
2025-09-08 14:44:56 -07:00
'event.name' : EVENT_RIPGREP_FALLBACK ,
error : undefined ,
} ) ,
) ;
} ) ;
it ( 'should log ripgrep fallback event with an error' , ( ) = > {
const event = new RipgrepFallbackEvent ( 'rg not found' ) ;
logRipgrepFallback ( mockConfig , event ) ;
expect (
ClearcutLogger . prototype . logRipgrepFallbackEvent ,
) . toHaveBeenCalled ( ) ;
const emittedEvent = mockLogger . emit . mock . calls [ 0 ] [ 0 ] ;
expect ( emittedEvent . body ) . toBe ( 'Switching to grep as fallback.' ) ;
expect ( emittedEvent . attributes ) . toEqual (
expect . objectContaining ( {
'session.id' : 'test-session-id' ,
'user.email' : 'test-user@example.com' ,
2025-10-10 12:06:08 -04:00
'installation.id' : 'test-installation-id' ,
2025-09-08 14:44:56 -07:00
'event.name' : EVENT_RIPGREP_FALLBACK ,
error : 'rg not found' ,
} ) ,
) ;
} ) ;
} ) ;
2025-06-12 16:48:10 -04:00
describe('logToolCall', () => {
  // Minimal config used only to satisfy the ToolRegistry constructor below.
  // NOTE: references mockGeminiClient before its declaration; safe because the
  // arrow body is only evaluated when getGeminiClient() is called.
  const cfg1 = {
    getSessionId: () => 'test-session-id',
    getTargetDir: () => 'target-dir',
    getGeminiClient: () => mockGeminiClient,
  } as Config;
  // Fuller config backing the GeminiClient handed out by mockConfig.
  const cfg2 = {
    getSessionId: () => 'test-session-id',
    getTargetDir: () => 'target-dir',
    getProxy: () => 'http://test.proxy.com:8080',
    getContentGeneratorConfig: () =>
      ({ model: 'test-model' }) as ContentGeneratorConfig,
    getModel: () => 'test-model',
    getEmbeddingModel: () => 'test-embedding-model',
    getWorkingDir: () => 'test-working-dir',
    getSandbox: () => true,
    getCoreTools: () => ['ls', 'read-file'],
    getApprovalMode: () => 'default',
    getTelemetryLogPromptsEnabled: () => true,
    getFileFilteringRespectGitIgnore: () => true,
    getFileFilteringAllowBuildArtifacts: () => false,
    getDebugMode: () => true,
    getMcpServers: () => ({
      'test-server': {
        command: 'test-command',
      },
    }),
    getQuestion: () => 'test-question',
    getToolRegistry: () =>
      new ToolRegistry(cfg1, {} as unknown as MessageBus),
    getUserMemory: () => 'user-memory',
  } as unknown as Config;

  // GeminiClient reads `config`/`promptId` off its constructor argument, so
  // graft both onto cfg2 before constructing the client.
  (cfg2 as unknown as { config: Config; promptId: string }).config = cfg2;
  (cfg2 as unknown as { config: Config; promptId: string }).promptId =
    'test-prompt-id';

  const mockGeminiClient = new GeminiClient(cfg2);

  // Config passed to logToolCall in every test of this suite.
  const mockConfig = {
    getSessionId: () => 'test-session-id',
    getTargetDir: () => 'target-dir',
    getGeminiClient: () => mockGeminiClient,
    getUsageStatisticsEnabled: () => true,
    getTelemetryEnabled: () => true,
    getTelemetryLogPromptsEnabled: () => true,
    isInteractive: () => false,
    getExperiments: () => undefined,
    getExperimentsAsync: async () => undefined,
    getContentGeneratorConfig: () => undefined,
  } as unknown as Config;

  // Spies capturing the metric recordings triggered by logToolCall.
  const mockMetrics = {
    recordToolCallMetrics: vi.fn(),
    recordLinesChanged: vi.fn(),
  };

  beforeEach(() => {
    // Route the real metrics module through the local spies and clear any
    // emits left over from a previous test.
    vi.spyOn(metrics, 'recordToolCallMetrics').mockImplementation(
      mockMetrics.recordToolCallMetrics,
    );
    vi.spyOn(metrics, 'recordLinesChanged').mockImplementation(
      mockMetrics.recordLinesChanged,
    );
    mockLogger.emit.mockReset();
  });
it ( 'should log a tool call with all fields' , ( ) = > {
2026-01-05 15:25:54 -05:00
const tool = new EditTool ( mockConfig , createMockMessageBus ( ) ) ;
2025-06-22 09:26:48 -05:00
const call : CompletedToolCall = {
2026-02-13 11:27:20 -05:00
status : CoreToolCallStatus.Success ,
2025-06-22 09:26:48 -05:00
request : {
name : 'test-function' ,
args : {
arg1 : 'value1' ,
arg2 : 2 ,
} ,
callId : 'test-call-id' ,
isClientInitiated : true ,
2025-07-10 00:19:30 +05:30
prompt_id : 'prompt-id-1' ,
2025-06-22 09:26:48 -05:00
} ,
response : {
callId : 'test-call-id' ,
2025-08-22 14:12:05 -07:00
responseParts : [ { text : 'test-response' } ] ,
2025-09-07 10:41:15 -07:00
resultDisplay : {
fileDiff : 'diff' ,
fileName : 'file.txt' ,
2026-01-07 12:10:22 -05:00
filePath : 'file.txt' ,
2025-09-07 10:41:15 -07:00
originalContent : 'old content' ,
newContent : 'new content' ,
diffStat : {
model_added_lines : 1 ,
model_removed_lines : 2 ,
model_added_chars : 3 ,
model_removed_chars : 4 ,
user_added_lines : 5 ,
user_removed_lines : 6 ,
user_added_chars : 7 ,
user_removed_chars : 8 ,
} ,
} ,
2025-06-22 09:26:48 -05:00
error : undefined ,
2025-08-01 11:20:08 -04:00
errorType : undefined ,
2025-09-11 08:40:46 -07:00
contentLength : 13 ,
2025-06-12 16:48:10 -04:00
} ,
2025-08-06 10:50:02 -07:00
tool ,
invocation : { } as AnyToolInvocation ,
2025-06-22 09:26:48 -05:00
durationMs : 100 ,
outcome : ToolConfirmationOutcome.ProceedOnce ,
2025-06-12 16:48:10 -04:00
} ;
2025-06-22 09:26:48 -05:00
const event = new ToolCallEvent ( call ) ;
2025-06-12 16:48:10 -04:00
2025-06-22 09:26:48 -05:00
logToolCall ( mockConfig , event ) ;
2025-06-12 16:48:10 -04:00
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'Tool call: test-function. Decision: accept. Success: true. Duration: 100ms.' ,
attributes : {
'session.id' : 'test-session-id' ,
2025-08-25 14:20:17 -07:00
'user.email' : 'test-user@example.com' ,
2025-10-10 12:06:08 -04:00
'installation.id' : 'test-installation-id' ,
2025-06-22 09:26:48 -05:00
'event.name' : EVENT_TOOL_CALL ,
2025-06-12 16:48:10 -04:00
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
2025-11-11 02:03:32 -08:00
interactive : false ,
2025-06-12 16:48:10 -04:00
function_name : 'test-function' ,
function_args : JSON.stringify (
{
arg1 : 'value1' ,
arg2 : 2 ,
} ,
null ,
2 ,
) ,
duration_ms : 100 ,
success : true ,
decision : ToolCallDecision.ACCEPT ,
2025-07-10 00:19:30 +05:30
prompt_id : 'prompt-id-1' ,
2025-08-19 10:55:47 +05:30
tool_type : 'native' ,
2025-08-25 14:20:17 -07:00
error : undefined ,
error_type : undefined ,
2025-11-11 02:03:32 -08:00
mcp_server_name : undefined ,
extension_id : undefined ,
2025-09-07 10:41:15 -07:00
metadata : {
model_added_lines : 1 ,
model_removed_lines : 2 ,
model_added_chars : 3 ,
model_removed_chars : 4 ,
user_added_lines : 5 ,
user_removed_lines : 6 ,
user_added_chars : 7 ,
user_removed_chars : 8 ,
} ,
2025-09-11 08:40:46 -07:00
content_length : 13 ,
2025-06-12 16:48:10 -04:00
} ,
} ) ;
expect ( mockMetrics . recordToolCallMetrics ) . toHaveBeenCalledWith (
mockConfig ,
100 ,
2025-10-01 06:33:47 -07:00
{
function_name : 'test-function' ,
success : true ,
decision : ToolCallDecision.ACCEPT ,
tool_type : 'native' ,
} ,
2025-06-12 16:48:10 -04:00
) ;
2025-06-29 20:44:33 -04:00
expect ( mockUiEvent . addEvent ) . toHaveBeenCalledWith ( {
. . . event ,
'event.name' : EVENT_TOOL_CALL ,
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
} ) ;
2025-10-30 14:55:47 -04:00
expect ( mockMetrics . recordLinesChanged ) . toHaveBeenCalledWith (
mockConfig ,
1 ,
'added' ,
{ function_name : 'test-function' } ,
) ;
expect ( mockMetrics . recordLinesChanged ) . toHaveBeenCalledWith (
mockConfig ,
2 ,
'removed' ,
{ function_name : 'test-function' } ,
) ;
2025-06-12 16:48:10 -04:00
} ) ;
2026-02-12 12:46:59 -05:00
it ( 'should merge data from response into metadata' , ( ) = > {
const call : CompletedToolCall = {
2026-02-13 11:27:20 -05:00
status : CoreToolCallStatus.Success ,
2026-02-12 12:46:59 -05:00
request : {
name : 'ask_user' ,
args : { questions : [ ] } ,
callId : 'test-call-id' ,
isClientInitiated : true ,
prompt_id : 'prompt-id-1' ,
} ,
response : {
callId : 'test-call-id' ,
responseParts : [ { text : 'test-response' } ] ,
resultDisplay : 'User answered: ...' ,
error : undefined ,
errorType : undefined ,
data : {
ask_user : {
question_types : [ 'choice' ] ,
dismissed : false ,
} ,
} ,
} ,
tool : undefined as unknown as AnyDeclarativeTool ,
invocation : { } as AnyToolInvocation ,
durationMs : 100 ,
outcome : ToolConfirmationOutcome.ProceedOnce ,
} ;
const event = new ToolCallEvent ( call ) ;
logToolCall ( mockConfig , event ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'Tool call: ask_user. Decision: accept. Success: true. Duration: 100ms.' ,
attributes : expect.objectContaining ( {
function_name : 'ask_user' ,
metadata : expect.objectContaining ( {
ask_user : {
question_types : [ 'choice' ] ,
dismissed : false ,
} ,
} ) ,
} ) ,
} ) ;
} ) ;
2025-06-12 16:48:10 -04:00
it ( 'should log a tool call with a reject decision' , ( ) = > {
2025-06-22 09:26:48 -05:00
const call : ErroredToolCall = {
2026-02-13 11:27:20 -05:00
status : CoreToolCallStatus.Error ,
2025-06-22 09:26:48 -05:00
request : {
name : 'test-function' ,
args : {
arg1 : 'value1' ,
arg2 : 2 ,
} ,
callId : 'test-call-id' ,
isClientInitiated : true ,
2025-07-10 00:19:30 +05:30
prompt_id : 'prompt-id-2' ,
2025-06-12 16:48:10 -04:00
} ,
2025-06-22 09:26:48 -05:00
response : {
callId : 'test-call-id' ,
2025-08-22 14:12:05 -07:00
responseParts : [ { text : 'test-response' } ] ,
2025-06-22 09:26:48 -05:00
resultDisplay : undefined ,
error : undefined ,
2025-08-01 11:20:08 -04:00
errorType : undefined ,
2025-09-11 08:40:46 -07:00
contentLength : undefined ,
2025-06-22 09:26:48 -05:00
} ,
durationMs : 100 ,
outcome : ToolConfirmationOutcome.Cancel ,
2025-06-12 16:48:10 -04:00
} ;
2025-06-22 09:26:48 -05:00
const event = new ToolCallEvent ( call ) ;
2025-06-12 16:48:10 -04:00
2025-06-22 09:26:48 -05:00
logToolCall ( mockConfig , event ) ;
2025-06-12 16:48:10 -04:00
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'Tool call: test-function. Decision: reject. Success: false. Duration: 100ms.' ,
attributes : {
'session.id' : 'test-session-id' ,
2025-08-25 14:20:17 -07:00
'user.email' : 'test-user@example.com' ,
2025-10-10 12:06:08 -04:00
'installation.id' : 'test-installation-id' ,
2025-06-22 09:26:48 -05:00
'event.name' : EVENT_TOOL_CALL ,
2025-06-12 16:48:10 -04:00
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
2025-11-11 02:03:32 -08:00
interactive : false ,
2025-06-12 16:48:10 -04:00
function_name : 'test-function' ,
function_args : JSON.stringify (
{
arg1 : 'value1' ,
arg2 : 2 ,
} ,
null ,
2 ,
) ,
duration_ms : 100 ,
success : false ,
decision : ToolCallDecision.REJECT ,
2025-07-10 00:19:30 +05:30
prompt_id : 'prompt-id-2' ,
2025-08-19 10:55:47 +05:30
tool_type : 'native' ,
2025-08-25 14:20:17 -07:00
error : undefined ,
error_type : undefined ,
2025-11-11 02:03:32 -08:00
mcp_server_name : undefined ,
extension_id : undefined ,
2025-08-25 14:20:17 -07:00
metadata : undefined ,
2025-09-11 08:40:46 -07:00
content_length : undefined ,
2025-06-12 16:48:10 -04:00
} ,
} ) ;
expect ( mockMetrics . recordToolCallMetrics ) . toHaveBeenCalledWith (
mockConfig ,
100 ,
2025-10-01 06:33:47 -07:00
{
function_name : 'test-function' ,
success : false ,
decision : ToolCallDecision.REJECT ,
tool_type : 'native' ,
} ,
2025-06-12 16:48:10 -04:00
) ;
2025-06-29 20:44:33 -04:00
expect ( mockUiEvent . addEvent ) . toHaveBeenCalledWith ( {
. . . event ,
'event.name' : EVENT_TOOL_CALL ,
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
} ) ;
2025-06-12 16:48:10 -04:00
} ) ;
it ( 'should log a tool call with a modify decision' , ( ) = > {
2025-06-22 09:26:48 -05:00
const call : CompletedToolCall = {
2026-02-13 11:27:20 -05:00
status : CoreToolCallStatus.Success ,
2025-06-22 09:26:48 -05:00
request : {
name : 'test-function' ,
args : {
arg1 : 'value1' ,
arg2 : 2 ,
} ,
callId : 'test-call-id' ,
isClientInitiated : true ,
2025-07-10 00:19:30 +05:30
prompt_id : 'prompt-id-3' ,
2025-06-22 09:26:48 -05:00
} ,
response : {
callId : 'test-call-id' ,
2025-08-22 14:12:05 -07:00
responseParts : [ { text : 'test-response' } ] ,
2025-06-22 09:26:48 -05:00
resultDisplay : undefined ,
error : undefined ,
2025-08-01 11:20:08 -04:00
errorType : undefined ,
2025-09-11 08:40:46 -07:00
contentLength : 13 ,
2025-06-12 16:48:10 -04:00
} ,
2025-06-22 09:26:48 -05:00
outcome : ToolConfirmationOutcome.ModifyWithEditor ,
2026-01-05 15:25:54 -05:00
tool : new EditTool ( mockConfig , createMockMessageBus ( ) ) ,
2025-08-06 10:50:02 -07:00
invocation : { } as AnyToolInvocation ,
2025-06-22 09:26:48 -05:00
durationMs : 100 ,
2025-06-12 16:48:10 -04:00
} ;
2025-06-22 09:26:48 -05:00
const event = new ToolCallEvent ( call ) ;
2025-06-12 16:48:10 -04:00
2025-06-22 09:26:48 -05:00
logToolCall ( mockConfig , event ) ;
2025-06-12 16:48:10 -04:00
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'Tool call: test-function. Decision: modify. Success: true. Duration: 100ms.' ,
attributes : {
'session.id' : 'test-session-id' ,
2025-08-25 14:20:17 -07:00
'user.email' : 'test-user@example.com' ,
2025-10-10 12:06:08 -04:00
'installation.id' : 'test-installation-id' ,
2025-06-22 09:26:48 -05:00
'event.name' : EVENT_TOOL_CALL ,
2025-06-12 16:48:10 -04:00
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
2025-11-11 02:03:32 -08:00
interactive : false ,
2025-06-12 16:48:10 -04:00
function_name : 'test-function' ,
function_args : JSON.stringify (
{
arg1 : 'value1' ,
arg2 : 2 ,
} ,
null ,
2 ,
) ,
duration_ms : 100 ,
success : true ,
decision : ToolCallDecision.MODIFY ,
2025-07-10 00:19:30 +05:30
prompt_id : 'prompt-id-3' ,
2025-08-19 10:55:47 +05:30
tool_type : 'native' ,
2025-08-25 14:20:17 -07:00
error : undefined ,
error_type : undefined ,
2025-11-11 02:03:32 -08:00
mcp_server_name : undefined ,
extension_id : undefined ,
2025-08-25 14:20:17 -07:00
metadata : undefined ,
2025-09-11 08:40:46 -07:00
content_length : 13 ,
2025-06-12 16:48:10 -04:00
} ,
} ) ;
expect ( mockMetrics . recordToolCallMetrics ) . toHaveBeenCalledWith (
mockConfig ,
100 ,
2025-10-01 06:33:47 -07:00
{
function_name : 'test-function' ,
success : true ,
decision : ToolCallDecision.MODIFY ,
tool_type : 'native' ,
} ,
2025-06-12 16:48:10 -04:00
) ;
2025-06-29 20:44:33 -04:00
expect ( mockUiEvent . addEvent ) . toHaveBeenCalledWith ( {
. . . event ,
'event.name' : EVENT_TOOL_CALL ,
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
} ) ;
2025-06-12 16:48:10 -04:00
} ) ;
it ( 'should log a tool call without a decision' , ( ) = > {
2025-06-22 09:26:48 -05:00
const call : CompletedToolCall = {
2026-02-13 11:27:20 -05:00
status : CoreToolCallStatus.Success ,
2025-06-22 09:26:48 -05:00
request : {
name : 'test-function' ,
args : {
arg1 : 'value1' ,
arg2 : 2 ,
} ,
callId : 'test-call-id' ,
isClientInitiated : true ,
2025-07-10 00:19:30 +05:30
prompt_id : 'prompt-id-4' ,
2025-06-22 09:26:48 -05:00
} ,
response : {
callId : 'test-call-id' ,
2025-08-22 14:12:05 -07:00
responseParts : [ { text : 'test-response' } ] ,
2025-06-22 09:26:48 -05:00
resultDisplay : undefined ,
error : undefined ,
2025-08-01 11:20:08 -04:00
errorType : undefined ,
2025-09-11 08:40:46 -07:00
contentLength : 13 ,
2025-06-12 16:48:10 -04:00
} ,
2026-01-05 15:25:54 -05:00
tool : new EditTool ( mockConfig , createMockMessageBus ( ) ) ,
2025-08-06 10:50:02 -07:00
invocation : { } as AnyToolInvocation ,
2025-06-22 09:26:48 -05:00
durationMs : 100 ,
2025-06-12 16:48:10 -04:00
} ;
2025-06-22 09:26:48 -05:00
const event = new ToolCallEvent ( call ) ;
2025-06-12 16:48:10 -04:00
logToolCall ( mockConfig , event ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'Tool call: test-function. Success: true. Duration: 100ms.' ,
attributes : {
'session.id' : 'test-session-id' ,
2025-08-25 14:20:17 -07:00
'user.email' : 'test-user@example.com' ,
2025-10-10 12:06:08 -04:00
'installation.id' : 'test-installation-id' ,
2025-06-22 09:26:48 -05:00
'event.name' : EVENT_TOOL_CALL ,
2025-06-12 16:48:10 -04:00
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
2025-11-11 02:03:32 -08:00
interactive : false ,
2025-06-12 16:48:10 -04:00
function_name : 'test-function' ,
function_args : JSON.stringify (
{
arg1 : 'value1' ,
arg2 : 2 ,
} ,
null ,
2 ,
) ,
duration_ms : 100 ,
success : true ,
2025-07-10 00:19:30 +05:30
prompt_id : 'prompt-id-4' ,
2025-08-19 10:55:47 +05:30
tool_type : 'native' ,
2025-08-25 14:20:17 -07:00
decision : undefined ,
error : undefined ,
error_type : undefined ,
2025-11-11 02:03:32 -08:00
mcp_server_name : undefined ,
extension_id : undefined ,
2025-08-25 14:20:17 -07:00
metadata : undefined ,
2025-09-11 08:40:46 -07:00
content_length : 13 ,
2025-06-12 16:48:10 -04:00
} ,
} ) ;
expect ( mockMetrics . recordToolCallMetrics ) . toHaveBeenCalledWith (
mockConfig ,
100 ,
2025-10-01 06:33:47 -07:00
{
function_name : 'test-function' ,
success : true ,
decision : undefined ,
tool_type : 'native' ,
} ,
2025-06-12 16:48:10 -04:00
) ;
2025-06-29 20:44:33 -04:00
expect ( mockUiEvent . addEvent ) . toHaveBeenCalledWith ( {
. . . event ,
'event.name' : EVENT_TOOL_CALL ,
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
} ) ;
2025-06-12 16:48:10 -04:00
} ) ;
it ( 'should log a failed tool call with an error' , ( ) = > {
2025-09-11 08:40:46 -07:00
const errorMessage = 'test-error' ;
2025-06-22 09:26:48 -05:00
const call : ErroredToolCall = {
2026-02-13 11:27:20 -05:00
status : CoreToolCallStatus.Error ,
2025-06-22 09:26:48 -05:00
request : {
name : 'test-function' ,
args : {
arg1 : 'value1' ,
arg2 : 2 ,
} ,
callId : 'test-call-id' ,
isClientInitiated : true ,
2025-07-10 00:19:30 +05:30
prompt_id : 'prompt-id-5' ,
2025-06-12 16:48:10 -04:00
} ,
2025-06-22 09:26:48 -05:00
response : {
callId : 'test-call-id' ,
2025-08-22 14:12:05 -07:00
responseParts : [ { text : 'test-response' } ] ,
2025-06-22 09:26:48 -05:00
resultDisplay : undefined ,
2025-09-11 08:40:46 -07:00
error : new Error ( errorMessage ) ,
2025-08-01 11:20:08 -04:00
errorType : ToolErrorType.UNKNOWN ,
2025-09-11 08:40:46 -07:00
contentLength : errorMessage.length ,
2025-06-22 09:26:48 -05:00
} ,
durationMs : 100 ,
2025-06-12 16:48:10 -04:00
} ;
2025-06-22 09:26:48 -05:00
const event = new ToolCallEvent ( call ) ;
2025-06-12 16:48:10 -04:00
logToolCall ( mockConfig , event ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'Tool call: test-function. Success: false. Duration: 100ms.' ,
attributes : {
'session.id' : 'test-session-id' ,
2025-08-25 14:20:17 -07:00
'user.email' : 'test-user@example.com' ,
2025-10-10 12:06:08 -04:00
'installation.id' : 'test-installation-id' ,
2025-06-22 09:26:48 -05:00
'event.name' : EVENT_TOOL_CALL ,
2025-06-12 16:48:10 -04:00
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
2025-11-11 02:03:32 -08:00
interactive : false ,
2025-06-12 16:48:10 -04:00
function_name : 'test-function' ,
function_args : JSON.stringify (
{
arg1 : 'value1' ,
arg2 : 2 ,
} ,
null ,
2 ,
) ,
duration_ms : 100 ,
success : false ,
error : 'test-error' ,
'error.message' : 'test-error' ,
2025-08-01 11:20:08 -04:00
error_type : ToolErrorType.UNKNOWN ,
'error.type' : ToolErrorType . UNKNOWN ,
2025-07-10 00:19:30 +05:30
prompt_id : 'prompt-id-5' ,
2025-08-19 10:55:47 +05:30
tool_type : 'native' ,
2025-08-25 14:20:17 -07:00
decision : undefined ,
2025-11-11 02:03:32 -08:00
mcp_server_name : undefined ,
extension_id : undefined ,
2025-08-25 14:20:17 -07:00
metadata : undefined ,
2025-09-11 08:40:46 -07:00
content_length : errorMessage.length ,
2025-06-12 16:48:10 -04:00
} ,
} ) ;
expect ( mockMetrics . recordToolCallMetrics ) . toHaveBeenCalledWith (
mockConfig ,
100 ,
2025-10-01 06:33:47 -07:00
{
function_name : 'test-function' ,
success : false ,
decision : undefined ,
tool_type : 'native' ,
} ,
2025-06-12 16:48:10 -04:00
) ;
2025-06-29 20:44:33 -04:00
expect ( mockUiEvent . addEvent ) . toHaveBeenCalledWith ( {
. . . event ,
'event.name' : EVENT_TOOL_CALL ,
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
} ) ;
2025-06-12 16:48:10 -04:00
} ) ;
2025-09-18 15:00:11 -07:00
it ( 'should log a tool call with mcp_server_name for MCP tools' , ( ) = > {
const mockMcpTool = new DiscoveredMCPTool (
{ } as CallableTool ,
'mock_mcp_server' ,
'mock_mcp_tool' ,
'tool description' ,
{
type : 'object' ,
properties : {
arg1 : { type : 'string' } ,
arg2 : { type : 'number' } ,
} ,
required : [ 'arg1' , 'arg2' ] ,
} ,
2026-01-04 17:11:43 -05:00
createMockMessageBus ( ) ,
2025-10-21 16:55:16 -04:00
false ,
undefined ,
undefined ,
2026-02-05 16:37:28 -05:00
undefined ,
2025-10-21 16:55:16 -04:00
'test-extension' ,
2025-10-31 06:50:22 -07:00
'test-extension-id' ,
2025-09-18 15:00:11 -07:00
) ;
const call : CompletedToolCall = {
2026-02-13 11:27:20 -05:00
status : CoreToolCallStatus.Success ,
2025-09-18 15:00:11 -07:00
request : {
name : 'mock_mcp_tool' ,
args : { arg1 : 'value1' , arg2 : 2 } ,
callId : 'test-call-id' ,
isClientInitiated : true ,
prompt_id : 'prompt-id' ,
} ,
response : {
callId : 'test-call-id' ,
responseParts : [ { text : 'test-response' } ] ,
resultDisplay : undefined ,
error : undefined ,
errorType : undefined ,
} ,
tool : mockMcpTool ,
invocation : { } as AnyToolInvocation ,
durationMs : 100 ,
} ;
const event = new ToolCallEvent ( call ) ;
logToolCall ( mockConfig , event ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'Tool call: mock_mcp_tool. Success: true. Duration: 100ms.' ,
attributes : {
'session.id' : 'test-session-id' ,
'user.email' : 'test-user@example.com' ,
2025-10-10 12:06:08 -04:00
'installation.id' : 'test-installation-id' ,
2025-09-18 15:00:11 -07:00
'event.name' : EVENT_TOOL_CALL ,
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
2025-10-31 06:50:22 -07:00
extension_name : 'test-extension' ,
extension_id : 'test-extension-id' ,
2025-11-11 02:03:32 -08:00
interactive : false ,
2025-09-18 15:00:11 -07:00
function_name : 'mock_mcp_tool' ,
function_args : JSON.stringify (
{
arg1 : 'value1' ,
arg2 : 2 ,
} ,
null ,
2 ,
) ,
duration_ms : 100 ,
success : true ,
prompt_id : 'prompt-id' ,
tool_type : 'mcp' ,
mcp_server_name : 'mock_mcp_server' ,
decision : undefined ,
error : undefined ,
error_type : undefined ,
metadata : undefined ,
2025-10-21 16:55:16 -04:00
content_length : undefined ,
2025-09-18 15:00:11 -07:00
} ,
} ) ;
} ) ;
2025-06-12 16:48:10 -04:00
} ) ;
2025-08-24 19:11:41 -07:00
describe ( 'logMalformedJsonResponse' , ( ) = > {
beforeEach ( ( ) = > {
vi . spyOn ( ClearcutLogger . prototype , 'logMalformedJsonResponseEvent' ) ;
} ) ;
it ( 'logs the event to Clearcut and OTEL' , ( ) = > {
const mockConfig = makeFakeConfig ( ) ;
const event = new MalformedJsonResponseEvent ( 'test-model' ) ;
logMalformedJsonResponse ( mockConfig , event ) ;
expect (
ClearcutLogger . prototype . logMalformedJsonResponseEvent ,
) . toHaveBeenCalledWith ( event ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'Malformed JSON response from test-model.' ,
attributes : {
'session.id' : 'test-session-id' ,
2025-08-25 14:20:17 -07:00
'user.email' : 'test-user@example.com' ,
2025-10-10 12:06:08 -04:00
'installation.id' : 'test-installation-id' ,
2025-08-24 19:11:41 -07:00
'event.name' : EVENT_MALFORMED_JSON_RESPONSE ,
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
2025-11-11 02:03:32 -08:00
interactive : false ,
2025-08-24 19:11:41 -07:00
model : 'test-model' ,
} ,
} ) ;
} ) ;
} ) ;
2025-09-04 10:20:46 -07:00
2026-03-04 14:27:47 -05:00
describe ( 'logInvalidChunk' , ( ) = > {
beforeEach ( ( ) = > {
vi . spyOn ( ClearcutLogger . prototype , 'logInvalidChunkEvent' ) ;
vi . spyOn ( metrics , 'recordInvalidChunk' ) ;
} ) ;
it ( 'logs the event to Clearcut and OTEL' , ( ) = > {
const mockConfig = makeFakeConfig ( ) ;
const event = new InvalidChunkEvent ( 'Unexpected token' ) ;
logInvalidChunk ( mockConfig , event ) ;
expect (
ClearcutLogger . prototype . logInvalidChunkEvent ,
) . toHaveBeenCalledWith ( event ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'Invalid chunk received from stream.' ,
attributes : {
'session.id' : 'test-session-id' ,
'user.email' : 'test-user@example.com' ,
'installation.id' : 'test-installation-id' ,
'event.name' : EVENT_INVALID_CHUNK ,
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
interactive : false ,
'error.message' : 'Unexpected token' ,
} ,
} ) ;
expect ( metrics . recordInvalidChunk ) . toHaveBeenCalledWith ( mockConfig ) ;
} ) ;
} ) ;
2025-09-04 10:20:46 -07:00
describe ( 'logFileOperation' , ( ) = > {
const mockConfig = {
getSessionId : ( ) = > 'test-session-id' ,
getTargetDir : ( ) = > 'target-dir' ,
getUsageStatisticsEnabled : ( ) = > true ,
getTelemetryEnabled : ( ) = > true ,
getTelemetryLogPromptsEnabled : ( ) = > true ,
2025-11-11 02:03:32 -08:00
isInteractive : ( ) = > false ,
2026-02-10 13:01:35 -08:00
getExperiments : ( ) = > undefined ,
getExperimentsAsync : async ( ) = > undefined ,
2026-02-27 10:15:06 -08:00
getContentGeneratorConfig : ( ) = > undefined ,
} as unknown as Config ;
2025-09-04 10:20:46 -07:00
const mockMetrics = {
recordFileOperationMetric : vi.fn ( ) ,
} ;
beforeEach ( ( ) = > {
vi . spyOn ( metrics , 'recordFileOperationMetric' ) . mockImplementation (
mockMetrics . recordFileOperationMetric ,
) ;
} ) ;
it ( 'should log a file operation event' , ( ) = > {
const event = new FileOperationEvent (
'test-tool' ,
FileOperation . READ ,
10 ,
'text/plain' ,
'.txt' ,
'typescript' ,
) ;
logFileOperation ( mockConfig , event ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'File operation: read. Lines: 10.' ,
attributes : {
'session.id' : 'test-session-id' ,
'user.email' : 'test-user@example.com' ,
2025-10-10 12:06:08 -04:00
'installation.id' : 'test-installation-id' ,
2025-09-04 10:20:46 -07:00
'event.name' : EVENT_FILE_OPERATION ,
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
2025-11-11 02:03:32 -08:00
interactive : false ,
2025-09-04 10:20:46 -07:00
tool_name : 'test-tool' ,
operation : 'read' ,
lines : 10 ,
mimetype : 'text/plain' ,
extension : '.txt' ,
programming_language : 'typescript' ,
} ,
} ) ;
expect ( mockMetrics . recordFileOperationMetric ) . toHaveBeenCalledWith (
mockConfig ,
2025-10-01 06:33:47 -07:00
{
operation : 'read' ,
lines : 10 ,
mimetype : 'text/plain' ,
extension : '.txt' ,
programming_language : 'typescript' ,
} ,
2025-09-04 10:20:46 -07:00
) ;
} ) ;
} ) ;
2025-09-11 08:40:46 -07:00
describe ( 'logToolOutputTruncated' , ( ) = > {
const mockConfig = {
getSessionId : ( ) = > 'test-session-id' ,
getUsageStatisticsEnabled : ( ) = > true ,
2025-11-11 02:03:32 -08:00
isInteractive : ( ) = > false ,
2026-02-10 13:01:35 -08:00
getExperiments : ( ) = > undefined ,
getExperimentsAsync : async ( ) = > undefined ,
2026-02-27 10:15:06 -08:00
getContentGeneratorConfig : ( ) = > undefined ,
2025-09-11 08:40:46 -07:00
} as unknown as Config ;
it ( 'should log a tool output truncated event' , ( ) = > {
const event = new ToolOutputTruncatedEvent ( 'prompt-id-1' , {
toolName : 'test-tool' ,
originalContentLength : 1000 ,
truncatedContentLength : 100 ,
threshold : 500 ,
} ) ;
logToolOutputTruncated ( mockConfig , event ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'Tool output truncated for test-tool.' ,
attributes : {
'session.id' : 'test-session-id' ,
'user.email' : 'test-user@example.com' ,
2025-10-10 12:06:08 -04:00
'installation.id' : 'test-installation-id' ,
2025-10-02 12:51:14 -04:00
'event.name' : EVENT_TOOL_OUTPUT_TRUNCATED ,
2025-09-11 08:40:46 -07:00
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
eventName : 'tool_output_truncated' ,
2025-11-11 02:03:32 -08:00
interactive : false ,
2025-09-11 08:40:46 -07:00
prompt_id : 'prompt-id-1' ,
tool_name : 'test-tool' ,
original_content_length : 1000 ,
truncated_content_length : 100 ,
threshold : 500 ,
} ,
} ) ;
} ) ;
} ) ;
2025-09-16 16:53:58 -04:00
describe ( 'logModelRouting' , ( ) = > {
const mockConfig = {
getSessionId : ( ) = > 'test-session-id' ,
getUsageStatisticsEnabled : ( ) = > true ,
2025-11-11 02:03:32 -08:00
isInteractive : ( ) = > false ,
2026-02-10 13:01:35 -08:00
getExperiments : ( ) = > undefined ,
getExperimentsAsync : async ( ) = > undefined ,
2026-02-27 10:15:06 -08:00
getContentGeneratorConfig : ( ) = > undefined ,
2025-09-16 16:53:58 -04:00
} as unknown as Config ;
beforeEach ( ( ) = > {
vi . spyOn ( ClearcutLogger . prototype , 'logModelRoutingEvent' ) ;
vi . spyOn ( metrics , 'recordModelRoutingMetrics' ) ;
} ) ;
it ( 'should log the event to Clearcut and OTEL, and record metrics' , ( ) = > {
const event = new ModelRoutingEvent (
'gemini-pro' ,
'default' ,
100 ,
'test-reason' ,
false ,
undefined ,
2026-02-24 19:15:14 -05:00
ApprovalMode . DEFAULT ,
2025-09-16 16:53:58 -04:00
) ;
logModelRouting ( mockConfig , event ) ;
expect (
ClearcutLogger . prototype . logModelRoutingEvent ,
) . toHaveBeenCalledWith ( event ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'Model routing decision. Model: gemini-pro, Source: default' ,
attributes : {
'session.id' : 'test-session-id' ,
'user.email' : 'test-user@example.com' ,
2025-10-10 12:06:08 -04:00
'installation.id' : 'test-installation-id' ,
2025-09-16 16:53:58 -04:00
. . . event ,
'event.name' : EVENT_MODEL_ROUTING ,
2025-11-11 02:03:32 -08:00
interactive : false ,
2025-09-16 16:53:58 -04:00
} ,
} ) ;
expect ( metrics . recordModelRoutingMetrics ) . toHaveBeenCalledWith (
mockConfig ,
event ,
) ;
} ) ;
2026-02-03 06:43:57 -06:00
it ( 'should log the event with numerical routing fields' , ( ) = > {
const event = new ModelRoutingEvent (
'gemini-pro' ,
'NumericalClassifier (Strict)' ,
150 ,
'[Score: 90 / Threshold: 80] reasoning' ,
false ,
undefined ,
2026-02-24 19:15:14 -05:00
ApprovalMode . DEFAULT ,
2026-02-03 06:43:57 -06:00
true ,
'80' ,
) ;
logModelRouting ( mockConfig , event ) ;
expect (
ClearcutLogger . prototype . logModelRoutingEvent ,
) . toHaveBeenCalledWith ( event ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'Model routing decision. Model: gemini-pro, Source: NumericalClassifier (Strict)' ,
attributes : {
'session.id' : 'test-session-id' ,
'user.email' : 'test-user@example.com' ,
'installation.id' : 'test-installation-id' ,
. . . event ,
'event.name' : EVENT_MODEL_ROUTING ,
interactive : false ,
} ,
} ) ;
} ) ;
2025-09-16 16:53:58 -04:00
it ( 'should only log to Clearcut if OTEL SDK is not initialized' , ( ) = > {
vi . spyOn ( sdk , 'isTelemetrySdkInitialized' ) . mockReturnValue ( false ) ;
2025-12-02 21:27:37 -08:00
vi . spyOn ( sdk , 'bufferTelemetryEvent' ) . mockImplementation ( ( ) = > { } ) ;
2025-09-16 16:53:58 -04:00
const event = new ModelRoutingEvent (
'gemini-pro' ,
'default' ,
100 ,
'test-reason' ,
false ,
undefined ,
2026-02-24 19:15:14 -05:00
ApprovalMode . DEFAULT ,
2025-09-16 16:53:58 -04:00
) ;
logModelRouting ( mockConfig , event ) ;
expect (
ClearcutLogger . prototype . logModelRoutingEvent ,
) . toHaveBeenCalledWith ( event ) ;
expect ( mockLogger . emit ) . not . toHaveBeenCalled ( ) ;
expect ( metrics . recordModelRoutingMetrics ) . not . toHaveBeenCalled ( ) ;
} ) ;
} ) ;
2025-09-18 14:01:36 -04:00
describe ( 'logExtensionInstall' , ( ) = > {
const mockConfig = {
getSessionId : ( ) = > 'test-session-id' ,
getUsageStatisticsEnabled : ( ) = > true ,
2025-10-10 14:28:13 -07:00
getContentGeneratorConfig : ( ) = > null ,
2025-11-11 02:03:32 -08:00
isInteractive : ( ) = > false ,
2026-02-10 13:01:35 -08:00
getExperiments : ( ) = > undefined ,
getExperimentsAsync : async ( ) = > undefined ,
2025-09-18 14:01:36 -04:00
} as unknown as Config ;
beforeEach ( ( ) = > {
vi . spyOn ( ClearcutLogger . prototype , 'logExtensionInstallEvent' ) ;
} ) ;
afterEach ( ( ) = > {
2025-11-08 10:29:36 -05:00
vi . clearAllMocks ( ) ;
2025-09-18 14:01:36 -04:00
} ) ;
2025-11-08 10:29:36 -05:00
it ( 'should log extension install event' , async ( ) = > {
2025-09-18 14:01:36 -04:00
const event = new ExtensionInstallEvent (
2025-10-21 16:55:16 -04:00
'testing' ,
2025-12-15 09:40:54 -08:00
'testing-hash' ,
2025-10-21 16:55:16 -04:00
'testing-id' ,
2025-09-18 14:01:36 -04:00
'0.1.0' ,
'git' ,
2026-02-13 11:27:20 -05:00
CoreToolCallStatus . Success ,
2025-09-18 14:01:36 -04:00
) ;
2025-11-08 10:29:36 -05:00
await logExtensionInstallEvent ( mockConfig , event ) ;
2025-09-18 14:01:36 -04:00
expect (
ClearcutLogger . prototype . logExtensionInstallEvent ,
) . toHaveBeenCalledWith ( event ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
2025-10-21 16:55:16 -04:00
body : 'Installed extension testing' ,
2025-09-18 14:01:36 -04:00
attributes : {
'session.id' : 'test-session-id' ,
'user.email' : 'test-user@example.com' ,
2025-10-10 12:06:08 -04:00
'installation.id' : 'test-installation-id' ,
2025-09-18 14:01:36 -04:00
'event.name' : EVENT_EXTENSION_INSTALL ,
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
2025-11-11 02:03:32 -08:00
interactive : false ,
2025-10-21 16:55:16 -04:00
extension_name : 'testing' ,
2025-09-18 14:01:36 -04:00
extension_version : '0.1.0' ,
extension_source : 'git' ,
2026-02-13 11:27:20 -05:00
status : CoreToolCallStatus.Success ,
2025-09-18 14:01:36 -04:00
} ,
} ) ;
} ) ;
} ) ;
2025-11-08 10:29:36 -05:00
describe ( 'logExtensionUpdate' , async ( ) = > {
2025-10-10 14:28:13 -07:00
const mockConfig = {
getSessionId : ( ) = > 'test-session-id' ,
getUsageStatisticsEnabled : ( ) = > true ,
getContentGeneratorConfig : ( ) = > null ,
2025-11-11 02:03:32 -08:00
isInteractive : ( ) = > false ,
2026-02-10 13:01:35 -08:00
getExperiments : ( ) = > undefined ,
getExperimentsAsync : async ( ) = > undefined ,
2025-10-10 14:28:13 -07:00
} as unknown as Config ;
beforeEach ( ( ) = > {
vi . spyOn ( ClearcutLogger . prototype , 'logExtensionUpdateEvent' ) ;
} ) ;
afterEach ( ( ) = > {
2025-11-08 10:29:36 -05:00
vi . clearAllMocks ( ) ;
2025-10-10 14:28:13 -07:00
} ) ;
2025-11-08 10:29:36 -05:00
it ( 'should log extension update event' , async ( ) = > {
2025-10-10 14:28:13 -07:00
const event = new ExtensionUpdateEvent (
2025-10-21 16:55:16 -04:00
'testing' ,
2025-12-15 09:40:54 -08:00
'testing-hash' ,
2025-10-21 16:55:16 -04:00
'testing-id' ,
2025-10-10 14:28:13 -07:00
'0.1.0' ,
'0.1.1' ,
'git' ,
2026-02-13 11:27:20 -05:00
CoreToolCallStatus . Success ,
2025-10-10 14:28:13 -07:00
) ;
2025-11-08 10:29:36 -05:00
await logExtensionUpdateEvent ( mockConfig , event ) ;
2025-10-10 14:28:13 -07:00
expect (
ClearcutLogger . prototype . logExtensionUpdateEvent ,
) . toHaveBeenCalledWith ( event ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
2025-10-21 16:55:16 -04:00
body : 'Updated extension testing' ,
2025-10-10 14:28:13 -07:00
attributes : {
'session.id' : 'test-session-id' ,
'user.email' : 'test-user@example.com' ,
'installation.id' : 'test-installation-id' ,
'event.name' : EVENT_EXTENSION_UPDATE ,
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
2025-11-11 02:03:32 -08:00
interactive : false ,
2025-10-21 16:55:16 -04:00
extension_name : 'testing' ,
2025-10-10 14:28:13 -07:00
extension_version : '0.1.0' ,
extension_previous_version : '0.1.1' ,
extension_source : 'git' ,
2026-02-13 11:27:20 -05:00
status : CoreToolCallStatus.Success ,
2025-10-10 14:28:13 -07:00
} ,
} ) ;
} ) ;
} ) ;
2025-11-08 10:29:36 -05:00
describe ( 'logExtensionUninstall' , async ( ) = > {
2025-09-18 14:01:36 -04:00
const mockConfig = {
getSessionId : ( ) = > 'test-session-id' ,
getUsageStatisticsEnabled : ( ) = > true ,
2025-10-10 14:28:13 -07:00
getContentGeneratorConfig : ( ) = > null ,
2025-11-11 02:03:32 -08:00
isInteractive : ( ) = > false ,
2026-02-10 13:01:35 -08:00
getExperiments : ( ) = > undefined ,
getExperimentsAsync : async ( ) = > undefined ,
2025-09-18 14:01:36 -04:00
} as unknown as Config ;
beforeEach ( ( ) = > {
vi . spyOn ( ClearcutLogger . prototype , 'logExtensionUninstallEvent' ) ;
} ) ;
afterEach ( ( ) = > {
2025-11-08 10:29:36 -05:00
vi . clearAllMocks ( ) ;
2025-09-18 14:01:36 -04:00
} ) ;
2025-11-08 10:29:36 -05:00
it ( 'should log extension uninstall event' , async ( ) = > {
2025-10-21 16:55:16 -04:00
const event = new ExtensionUninstallEvent (
'testing' ,
2025-12-15 09:40:54 -08:00
'testing-hash' ,
2025-10-21 16:55:16 -04:00
'testing-id' ,
2026-02-13 11:27:20 -05:00
CoreToolCallStatus . Success ,
2025-10-21 16:55:16 -04:00
) ;
2025-09-18 14:01:36 -04:00
2025-11-08 10:29:36 -05:00
await logExtensionUninstall ( mockConfig , event ) ;
2025-09-18 14:01:36 -04:00
expect (
ClearcutLogger . prototype . logExtensionUninstallEvent ,
) . toHaveBeenCalledWith ( event ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
2025-10-21 16:55:16 -04:00
body : 'Uninstalled extension testing' ,
2025-09-18 14:01:36 -04:00
attributes : {
'session.id' : 'test-session-id' ,
'user.email' : 'test-user@example.com' ,
2025-10-10 12:06:08 -04:00
'installation.id' : 'test-installation-id' ,
2025-09-18 14:01:36 -04:00
'event.name' : EVENT_EXTENSION_UNINSTALL ,
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
2025-11-11 02:03:32 -08:00
interactive : false ,
2025-10-21 16:55:16 -04:00
extension_name : 'testing' ,
2026-02-13 11:27:20 -05:00
status : CoreToolCallStatus.Success ,
2025-09-18 14:01:36 -04:00
} ,
} ) ;
} ) ;
} ) ;
2025-11-08 10:29:36 -05:00
describe ( 'logExtensionEnable' , async ( ) = > {
2025-09-18 14:01:36 -04:00
const mockConfig = {
getSessionId : ( ) = > 'test-session-id' ,
getUsageStatisticsEnabled : ( ) = > true ,
2025-11-11 02:03:32 -08:00
isInteractive : ( ) = > false ,
2026-02-10 13:01:35 -08:00
getExperiments : ( ) = > undefined ,
getExperimentsAsync : async ( ) = > undefined ,
2026-02-27 10:15:06 -08:00
getContentGeneratorConfig : ( ) = > undefined ,
2025-09-18 14:01:36 -04:00
} as unknown as Config ;
beforeEach ( ( ) = > {
vi . spyOn ( ClearcutLogger . prototype , 'logExtensionEnableEvent' ) ;
} ) ;
afterEach ( ( ) = > {
2025-11-08 10:29:36 -05:00
vi . clearAllMocks ( ) ;
2025-09-18 14:01:36 -04:00
} ) ;
2025-11-08 10:29:36 -05:00
it ( 'should log extension enable event' , async ( ) = > {
2025-12-15 09:40:54 -08:00
const event = new ExtensionEnableEvent (
'testing' ,
'testing-hash' ,
'testing-id' ,
'user' ,
) ;
2025-09-18 14:01:36 -04:00
2025-11-08 10:29:36 -05:00
await logExtensionEnable ( mockConfig , event ) ;
2025-09-18 14:01:36 -04:00
expect (
ClearcutLogger . prototype . logExtensionEnableEvent ,
) . toHaveBeenCalledWith ( event ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
2025-10-21 16:55:16 -04:00
body : 'Enabled extension testing' ,
2025-09-18 14:01:36 -04:00
attributes : {
'session.id' : 'test-session-id' ,
'user.email' : 'test-user@example.com' ,
2025-10-10 12:06:08 -04:00
'installation.id' : 'test-installation-id' ,
2025-09-18 14:01:36 -04:00
'event.name' : EVENT_EXTENSION_ENABLE ,
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
2025-11-11 02:03:32 -08:00
interactive : false ,
2025-10-21 16:55:16 -04:00
extension_name : 'testing' ,
2025-09-18 14:01:36 -04:00
setting_scope : 'user' ,
} ,
} ) ;
} ) ;
} ) ;
2025-09-23 14:37:35 -04:00
describe ( 'logExtensionDisable' , ( ) = > {
const mockConfig = {
getSessionId : ( ) = > 'test-session-id' ,
getUsageStatisticsEnabled : ( ) = > true ,
2025-11-11 02:03:32 -08:00
isInteractive : ( ) = > false ,
2026-02-10 13:01:35 -08:00
getExperiments : ( ) = > undefined ,
getExperimentsAsync : async ( ) = > undefined ,
2026-02-27 10:15:06 -08:00
getContentGeneratorConfig : ( ) = > undefined ,
2025-09-23 14:37:35 -04:00
} as unknown as Config ;
beforeEach ( ( ) = > {
vi . spyOn ( ClearcutLogger . prototype , 'logExtensionDisableEvent' ) ;
} ) ;
afterEach ( ( ) = > {
2025-11-08 10:29:36 -05:00
vi . clearAllMocks ( ) ;
2025-09-23 14:37:35 -04:00
} ) ;
2025-11-08 10:29:36 -05:00
it ( 'should log extension disable event' , async ( ) = > {
2025-12-15 09:40:54 -08:00
const event = new ExtensionDisableEvent (
'testing' ,
'testing-hash' ,
'testing-id' ,
'user' ,
) ;
2025-09-23 14:37:35 -04:00
2025-11-08 10:29:36 -05:00
await logExtensionDisable ( mockConfig , event ) ;
2025-09-23 14:37:35 -04:00
expect (
ClearcutLogger . prototype . logExtensionDisableEvent ,
) . toHaveBeenCalledWith ( event ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
2025-10-21 16:55:16 -04:00
body : 'Disabled extension testing' ,
2025-09-23 14:37:35 -04:00
attributes : {
'session.id' : 'test-session-id' ,
'user.email' : 'test-user@example.com' ,
2025-10-10 12:06:08 -04:00
'installation.id' : 'test-installation-id' ,
2025-09-23 14:37:35 -04:00
'event.name' : EVENT_EXTENSION_DISABLE ,
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
2025-11-11 02:03:32 -08:00
interactive : false ,
2025-10-21 16:55:16 -04:00
extension_name : 'testing' ,
2025-09-23 14:37:35 -04:00
setting_scope : 'user' ,
} ,
} ) ;
} ) ;
} ) ;
2025-10-08 15:42:33 -04:00
describe ( 'logAgentStart' , ( ) = > {
const mockConfig = {
getSessionId : ( ) = > 'test-session-id' ,
getUsageStatisticsEnabled : ( ) = > true ,
2025-11-11 02:03:32 -08:00
isInteractive : ( ) = > false ,
2026-02-10 13:01:35 -08:00
getExperiments : ( ) = > undefined ,
getExperimentsAsync : async ( ) = > undefined ,
2026-02-27 10:15:06 -08:00
getContentGeneratorConfig : ( ) = > undefined ,
2025-10-08 15:42:33 -04:00
} as unknown as Config ;
beforeEach ( ( ) = > {
vi . spyOn ( ClearcutLogger . prototype , 'logAgentStartEvent' ) ;
} ) ;
it ( 'should log agent start event' , ( ) = > {
const event = new AgentStartEvent ( 'agent-123' , 'TestAgent' ) ;
logAgentStart ( mockConfig , event ) ;
expect ( ClearcutLogger . prototype . logAgentStartEvent ) . toHaveBeenCalledWith (
event ,
) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'Agent TestAgent started. ID: agent-123' ,
attributes : {
'session.id' : 'test-session-id' ,
'user.email' : 'test-user@example.com' ,
2025-10-10 12:06:08 -04:00
'installation.id' : 'test-installation-id' ,
2025-10-08 15:42:33 -04:00
'event.name' : EVENT_AGENT_START ,
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
2025-11-11 02:03:32 -08:00
interactive : false ,
2025-10-08 15:42:33 -04:00
agent_id : 'agent-123' ,
agent_name : 'TestAgent' ,
} ,
} ) ;
} ) ;
} ) ;
describe ( 'logAgentFinish' , ( ) = > {
const mockConfig = {
getSessionId : ( ) = > 'test-session-id' ,
getUsageStatisticsEnabled : ( ) = > true ,
2025-11-11 02:03:32 -08:00
isInteractive : ( ) = > false ,
2026-02-10 13:01:35 -08:00
getExperiments : ( ) = > undefined ,
getExperimentsAsync : async ( ) = > undefined ,
2026-02-27 10:15:06 -08:00
getContentGeneratorConfig : ( ) = > undefined ,
2025-10-08 15:42:33 -04:00
} as unknown as Config ;
beforeEach ( ( ) = > {
vi . spyOn ( ClearcutLogger . prototype , 'logAgentFinishEvent' ) ;
vi . spyOn ( metrics , 'recordAgentRunMetrics' ) ;
} ) ;
it ( 'should log agent finish event and record metrics' , ( ) = > {
const event = new AgentFinishEvent (
'agent-123' ,
'TestAgent' ,
1000 ,
5 ,
AgentTerminateMode . GOAL ,
) ;
logAgentFinish ( mockConfig , event ) ;
expect ( ClearcutLogger . prototype . logAgentFinishEvent ) . toHaveBeenCalledWith (
event ,
) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'Agent TestAgent finished. Reason: GOAL. Duration: 1000ms. Turns: 5.' ,
attributes : {
'session.id' : 'test-session-id' ,
'user.email' : 'test-user@example.com' ,
2025-10-10 12:06:08 -04:00
'installation.id' : 'test-installation-id' ,
2025-10-08 15:42:33 -04:00
'event.name' : EVENT_AGENT_FINISH ,
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
2025-11-11 02:03:32 -08:00
interactive : false ,
2025-10-08 15:42:33 -04:00
agent_id : 'agent-123' ,
agent_name : 'TestAgent' ,
duration_ms : 1000 ,
turn_count : 5 ,
terminate_reason : 'GOAL' ,
} ,
} ) ;
expect ( metrics . recordAgentRunMetrics ) . toHaveBeenCalledWith (
mockConfig ,
event ,
) ;
} ) ;
} ) ;
2025-10-09 13:01:17 -04:00
describe ( 'logWebFetchFallbackAttempt' , ( ) = > {
const mockConfig = {
getSessionId : ( ) = > 'test-session-id' ,
getUsageStatisticsEnabled : ( ) = > true ,
2025-11-11 02:03:32 -08:00
isInteractive : ( ) = > false ,
2026-02-10 13:01:35 -08:00
getExperiments : ( ) = > undefined ,
getExperimentsAsync : async ( ) = > undefined ,
2026-02-27 10:15:06 -08:00
getContentGeneratorConfig : ( ) = > undefined ,
2025-10-09 13:01:17 -04:00
} as unknown as Config ;
beforeEach ( ( ) = > {
vi . spyOn ( ClearcutLogger . prototype , 'logWebFetchFallbackAttemptEvent' ) ;
} ) ;
it ( 'should log web fetch fallback attempt event' , ( ) = > {
const event = new WebFetchFallbackAttemptEvent ( 'private_ip' ) ;
logWebFetchFallbackAttempt ( mockConfig , event ) ;
expect (
ClearcutLogger . prototype . logWebFetchFallbackAttemptEvent ,
) . toHaveBeenCalledWith ( event ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'Web fetch fallback attempt. Reason: private_ip' ,
attributes : {
'session.id' : 'test-session-id' ,
'user.email' : 'test-user@example.com' ,
2025-10-10 12:06:08 -04:00
'installation.id' : 'test-installation-id' ,
2025-10-09 13:01:17 -04:00
'event.name' : EVENT_WEB_FETCH_FALLBACK_ATTEMPT ,
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
2025-11-11 02:03:32 -08:00
interactive : false ,
2025-10-09 13:01:17 -04:00
reason : 'private_ip' ,
} ,
} ) ;
} ) ;
} ) ;
2025-12-21 16:18:42 -05:00
describe ( 'logHookCall' , ( ) = > {
const mockConfig = {
getSessionId : ( ) = > 'test-session-id' ,
getUsageStatisticsEnabled : ( ) = > true ,
isInteractive : ( ) = > false ,
2026-02-10 13:01:35 -08:00
getExperiments : ( ) = > undefined ,
getExperimentsAsync : async ( ) = > undefined ,
2025-12-21 16:18:42 -05:00
getTelemetryLogPromptsEnabled : ( ) = > false ,
2026-02-27 10:15:06 -08:00
getContentGeneratorConfig : ( ) = > undefined ,
2025-12-21 16:18:42 -05:00
} as unknown as Config ;
beforeEach ( ( ) = > {
vi . spyOn ( ClearcutLogger . prototype , 'logHookCallEvent' ) ;
vi . spyOn ( metrics , 'recordHookCallMetrics' ) ;
} ) ;
it ( 'should log hook call event to Clearcut and OTEL' , ( ) = > {
const event = new HookCallEvent (
'before-tool' ,
2026-02-24 13:03:36 -08:00
HookType . Command ,
2025-12-21 16:18:42 -05:00
'/path/to/script.sh' ,
{ arg : 'val' } ,
150 ,
true ,
{ out : 'res' } ,
0 ,
) ;
logHookCall ( mockConfig , event ) ;
expect ( ClearcutLogger . prototype . logHookCallEvent ) . toHaveBeenCalledWith (
event ,
) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'Hook call before-tool./path/to/script.sh succeeded in 150ms' ,
attributes : {
'session.id' : 'test-session-id' ,
'user.email' : 'test-user@example.com' ,
'installation.id' : 'test-installation-id' ,
'event.name' : EVENT_HOOK_CALL ,
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
interactive : false ,
hook_event_name : 'before-tool' ,
hook_type : 'command' ,
hook_name : 'script.sh' , // Sanitized because logPrompts is false
duration_ms : 150 ,
success : true ,
exit_code : 0 ,
} ,
} ) ;
expect ( metrics . recordHookCallMetrics ) . toHaveBeenCalledWith (
mockConfig ,
'before-tool' ,
'/path/to/script.sh' ,
150 ,
true ,
) ;
} ) ;
} ) ;
2026-03-11 14:55:48 -04:00
describe ( 'logNetworkRetryAttempt' , ( ) = > {
const mockConfig = makeFakeConfig ( ) ;
beforeEach ( ( ) = > {
vi . spyOn ( ClearcutLogger . prototype , 'logNetworkRetryAttemptEvent' ) ;
vi . spyOn ( metrics , 'recordRetryAttemptMetrics' ) ;
} ) ;
it ( 'logs the network retry attempt event to Clearcut and OTEL' , ( ) = > {
const event = new NetworkRetryAttemptEvent (
2 ,
5 ,
'Overloaded' ,
1000 ,
'test-model' ,
) ;
logNetworkRetryAttempt ( mockConfig , event ) ;
expect (
ClearcutLogger . prototype . logNetworkRetryAttemptEvent ,
) . toHaveBeenCalledWith ( event ) ;
expect ( mockLogger . emit ) . toHaveBeenCalledWith ( {
body : 'Network retry attempt 2/5 for test-model. Delay: 1000ms. Error type: Overloaded' ,
attributes : {
'session.id' : 'test-session-id' ,
'user.email' : 'test-user@example.com' ,
'installation.id' : 'test-installation-id' ,
'event.name' : EVENT_NETWORK_RETRY_ATTEMPT ,
'event.timestamp' : '2025-01-01T00:00:00.000Z' ,
interactive : false ,
attempt : 2 ,
max_attempts : 5 ,
error_type : 'Overloaded' ,
delay_ms : 1000 ,
model : 'test-model' ,
} ,
} ) ;
expect ( metrics . recordRetryAttemptMetrics ) . toHaveBeenCalledWith (
mockConfig ,
{
model : 'test-model' ,
attempt : 2 ,
} ,
) ;
} ) ;
} ) ;
2025-12-02 21:27:37 -08:00
describe ( 'Telemetry Buffering' , ( ) = > {
2026-02-10 13:01:35 -08:00
it ( 'should buffer events when SDK is not initialized' , async ( ) = > {
2025-12-02 21:27:37 -08:00
vi . spyOn ( sdk , 'isTelemetrySdkInitialized' ) . mockReturnValue ( false ) ;
const bufferSpy = vi
. spyOn ( sdk , 'bufferTelemetryEvent' )
. mockImplementation ( ( ) = > { } ) ;
const mockConfig = makeFakeConfig ( ) ;
const event = new StartSessionEvent ( mockConfig ) ;
logCliConfiguration ( mockConfig , event ) ;
expect ( bufferSpy ) . toHaveBeenCalled ( ) ;
expect ( mockLogger . emit ) . not . toHaveBeenCalled ( ) ;
} ) ;
} ) ;
2025-06-11 17:47:21 +00:00
} ) ;