mirror of
https://github.com/google-gemini/gemini-cli.git
synced 2026-03-28 23:11:19 -07:00
feat(telemetry): Instrument traces with more attributes and make them available to OTEL users (#20237)
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com> Co-authored-by: Jerop Kipruto <jerop@google.com> Co-authored-by: MD. MOHIBUR RAHMAN <35300157+mrpmohiburrahman@users.noreply.github.com> Co-authored-by: Jeffrey Ying <jeffrey.ying86@live.com> Co-authored-by: Bryan Morgan <bryanmorgan@google.com> Co-authored-by: joshualitt <joshualitt@google.com> Co-authored-by: Dev Randalpura <devrandalpura@google.com> Co-authored-by: Google Admin <github-admin@google.com> Co-authored-by: Ben Knutson <benknutson@google.com>
This commit is contained in:
@@ -5,3 +5,34 @@
|
||||
*/
|
||||
|
||||
// Identity of the Gemini CLI agent as reported on telemetry spans.
export const SERVICE_NAME = 'gemini-cli';
// Human-readable description attached as the agent description attribute.
export const SERVICE_DESCRIPTION =
  'Gemini CLI is an open-source AI agent that brings the power of Gemini directly into your terminal. It is designed to be a terminal-first, extensible, and powerful tool for developers, engineers, SREs, and beyond.';

// Gemini CLI specific semantic conventions
// https://opentelemetry.io/docs/specs/semconv/registry/attributes/gen-ai/#genai-attributes
// Attribute keys follow the OpenTelemetry GenAI semantic-convention registry
// unless noted otherwise below.
export const GEN_AI_OPERATION_NAME = 'gen_ai.operation.name';
export const GEN_AI_AGENT_NAME = 'gen_ai.agent.name';
export const GEN_AI_AGENT_DESCRIPTION = 'gen_ai.agent.description';
// Serialized input/output message payloads recorded on each span.
export const GEN_AI_INPUT_MESSAGES = 'gen_ai.input.messages';
export const GEN_AI_OUTPUT_MESSAGES = 'gen_ai.output.messages';
export const GEN_AI_REQUEST_MODEL = 'gen_ai.request.model';
export const GEN_AI_RESPONSE_MODEL = 'gen_ai.response.model';
// NOTE(review): 'gen_ai.prompt.name' is not in the semconv registry —
// presumably a CLI-specific extension; confirm intent.
export const GEN_AI_PROMPT_NAME = 'gen_ai.prompt.name';
export const GEN_AI_TOOL_NAME = 'gen_ai.tool.name';
// NOTE(review): the semconv registry spells this 'gen_ai.tool.call.id'
// (dots, not underscore). Changing the string would change emitted
// telemetry keys, so flagging only — verify which form is intended.
export const GEN_AI_TOOL_CALL_ID = 'gen_ai.tool.call_id';
export const GEN_AI_TOOL_DESCRIPTION = 'gen_ai.tool.description';
// Token accounting reported by the model response.
export const GEN_AI_USAGE_INPUT_TOKENS = 'gen_ai.usage.input_tokens';
export const GEN_AI_USAGE_OUTPUT_TOKENS = 'gen_ai.usage.output_tokens';
export const GEN_AI_SYSTEM_INSTRUCTIONS = 'gen_ai.system_instructions';
export const GEN_AI_TOOL_DEFINITIONS = 'gen_ai.tool.definitions';
// Correlates every span in one CLI session (set from the session id).
export const GEN_AI_CONVERSATION_ID = 'gen_ai.conversation.id';
|
||||
|
||||
// Gemini CLI specific operations
|
||||
/**
 * Operations instrumented by Gemini CLI dev tracing. The member value is
 * used both as the span name and as the `gen_ai.operation.name` attribute
 * (see runInDevTraceSpan in trace.ts).
 */
export enum GeminiCliOperation {
  // A single tool invocation.
  ToolCall = 'tool_call',
  // A request/response round trip with the model.
  LLMCall = 'llm_call',
  // Handling of a prompt typed by the user.
  UserPrompt = 'user_prompt',
  // Construction/dispatch of the system prompt.
  SystemPrompt = 'system_prompt',
  // A (sub)agent invocation.
  AgentCall = 'agent_call',
  // Batch scheduling of pending tool calls.
  ScheduleToolCalls = 'schedule_tool_calls',
}
|
||||
|
||||
@@ -148,3 +148,4 @@ export {
|
||||
} from './metrics.js';
|
||||
// Public telemetry surface: dev-trace helper, startup profiling, and the
// GenAI semantic-convention constants.
export { runInDevTraceSpan, type SpanMetadata } from './trace.js';
export { startupProfiler, StartupProfiler } from './startupProfiler.js';
export * from './constants.js';
|
||||
|
||||
188
packages/core/src/telemetry/trace.test.ts
Normal file
188
packages/core/src/telemetry/trace.test.ts
Normal file
@@ -0,0 +1,188 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2026 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import { trace, SpanStatusCode, diag, type Tracer } from '@opentelemetry/api';
|
||||
import { runInDevTraceSpan } from './trace.js';
|
||||
import {
|
||||
GeminiCliOperation,
|
||||
GEN_AI_CONVERSATION_ID,
|
||||
GEN_AI_AGENT_DESCRIPTION,
|
||||
GEN_AI_AGENT_NAME,
|
||||
GEN_AI_INPUT_MESSAGES,
|
||||
GEN_AI_OPERATION_NAME,
|
||||
GEN_AI_OUTPUT_MESSAGES,
|
||||
SERVICE_DESCRIPTION,
|
||||
SERVICE_NAME,
|
||||
} from './constants.js';
|
||||
|
||||
// Replace only the OTEL entry points trace.ts uses: `trace.getTracer` so each
// test can inject a fake tracer, and `diag.error` so internal error logging
// can be asserted. Everything else is passed through from the real module.
vi.mock('@opentelemetry/api', async (importOriginal) => {
  const original = await importOriginal<typeof import('@opentelemetry/api')>();
  return {
    ...original,
    trace: {
      getTracer: vi.fn(),
    },
    diag: {
      error: vi.fn(),
    },
  };
});

// Pin the session id so the GEN_AI_CONVERSATION_ID attribute is deterministic.
vi.mock('../utils/session.js', () => ({
  sessionId: 'test-session-id',
}));
|
||||
|
||||
describe('runInDevTraceSpan', () => {
|
||||
const mockSpan = {
|
||||
setAttribute: vi.fn(),
|
||||
setStatus: vi.fn(),
|
||||
recordException: vi.fn(),
|
||||
end: vi.fn(),
|
||||
};
|
||||
|
||||
const mockTracer = {
|
||||
startActiveSpan: vi.fn((name, options, callback) => callback(mockSpan)),
|
||||
} as unknown as Tracer;
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
vi.mocked(trace.getTracer).mockReturnValue(mockTracer);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.unstubAllEnvs();
|
||||
});
|
||||
|
||||
it('should start an active span', async () => {
|
||||
const fn = vi.fn(async () => 'result');
|
||||
|
||||
const result = await runInDevTraceSpan(
|
||||
{ operation: GeminiCliOperation.LLMCall },
|
||||
fn,
|
||||
);
|
||||
|
||||
expect(result).toBe('result');
|
||||
expect(trace.getTracer).toHaveBeenCalled();
|
||||
expect(mockTracer.startActiveSpan).toHaveBeenCalledWith(
|
||||
GeminiCliOperation.LLMCall,
|
||||
{},
|
||||
expect.any(Function),
|
||||
);
|
||||
});
|
||||
|
||||
it('should set default attributes on the span metadata', async () => {
|
||||
await runInDevTraceSpan(
|
||||
{ operation: GeminiCliOperation.LLMCall },
|
||||
async ({ metadata }) => {
|
||||
expect(metadata.attributes[GEN_AI_OPERATION_NAME]).toBe(
|
||||
GeminiCliOperation.LLMCall,
|
||||
);
|
||||
expect(metadata.attributes[GEN_AI_AGENT_NAME]).toBe(SERVICE_NAME);
|
||||
expect(metadata.attributes[GEN_AI_AGENT_DESCRIPTION]).toBe(
|
||||
SERVICE_DESCRIPTION,
|
||||
);
|
||||
expect(metadata.attributes[GEN_AI_CONVERSATION_ID]).toBe(
|
||||
'test-session-id',
|
||||
);
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
it('should set span attributes from metadata on completion', async () => {
|
||||
await runInDevTraceSpan(
|
||||
{ operation: GeminiCliOperation.LLMCall },
|
||||
async ({ metadata }) => {
|
||||
metadata.input = { query: 'hello' };
|
||||
metadata.output = { response: 'world' };
|
||||
metadata.attributes['custom.attr'] = 'value';
|
||||
},
|
||||
);
|
||||
|
||||
expect(mockSpan.setAttribute).toHaveBeenCalledWith(
|
||||
GEN_AI_INPUT_MESSAGES,
|
||||
JSON.stringify({ query: 'hello' }),
|
||||
);
|
||||
expect(mockSpan.setAttribute).toHaveBeenCalledWith(
|
||||
GEN_AI_OUTPUT_MESSAGES,
|
||||
JSON.stringify({ response: 'world' }),
|
||||
);
|
||||
expect(mockSpan.setAttribute).toHaveBeenCalledWith('custom.attr', 'value');
|
||||
expect(mockSpan.setStatus).toHaveBeenCalledWith({
|
||||
code: SpanStatusCode.OK,
|
||||
});
|
||||
expect(mockSpan.end).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle errors in the wrapped function', async () => {
|
||||
const error = new Error('test error');
|
||||
await expect(
|
||||
runInDevTraceSpan({ operation: GeminiCliOperation.LLMCall }, async () => {
|
||||
throw error;
|
||||
}),
|
||||
).rejects.toThrow(error);
|
||||
|
||||
expect(mockSpan.setStatus).toHaveBeenCalledWith({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: 'test error',
|
||||
});
|
||||
expect(mockSpan.recordException).toHaveBeenCalledWith(error);
|
||||
expect(mockSpan.end).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should respect noAutoEnd option', async () => {
|
||||
let capturedEndSpan: () => void = () => {};
|
||||
const result = await runInDevTraceSpan(
|
||||
{ operation: GeminiCliOperation.LLMCall, noAutoEnd: true },
|
||||
async ({ endSpan }) => {
|
||||
capturedEndSpan = endSpan;
|
||||
return 'streaming';
|
||||
},
|
||||
);
|
||||
|
||||
expect(result).toBe('streaming');
|
||||
expect(mockSpan.end).not.toHaveBeenCalled();
|
||||
|
||||
capturedEndSpan();
|
||||
expect(mockSpan.end).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should automatically end span on error even if noAutoEnd is true', async () => {
|
||||
const error = new Error('streaming error');
|
||||
await expect(
|
||||
runInDevTraceSpan(
|
||||
{ operation: GeminiCliOperation.LLMCall, noAutoEnd: true },
|
||||
async () => {
|
||||
throw error;
|
||||
},
|
||||
),
|
||||
).rejects.toThrow(error);
|
||||
|
||||
expect(mockSpan.end).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle exceptions in endSpan gracefully', async () => {
|
||||
mockSpan.setAttribute.mockImplementation(() => {
|
||||
throw new Error('attribute error');
|
||||
});
|
||||
|
||||
await runInDevTraceSpan(
|
||||
{ operation: GeminiCliOperation.LLMCall },
|
||||
async ({ metadata }) => {
|
||||
metadata.input = 'trigger error';
|
||||
},
|
||||
);
|
||||
|
||||
expect(diag.error).toHaveBeenCalled();
|
||||
expect(mockSpan.setStatus).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
code: SpanStatusCode.ERROR,
|
||||
message: expect.stringContaining('attribute error'),
|
||||
}),
|
||||
);
|
||||
expect(mockSpan.end).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
@@ -12,6 +12,18 @@ import {
|
||||
type SpanOptions,
|
||||
} from '@opentelemetry/api';
|
||||
import { safeJsonStringify } from '../utils/safeJsonStringify.js';
|
||||
import {
|
||||
type GeminiCliOperation,
|
||||
GEN_AI_AGENT_DESCRIPTION,
|
||||
GEN_AI_AGENT_NAME,
|
||||
GEN_AI_CONVERSATION_ID,
|
||||
GEN_AI_INPUT_MESSAGES,
|
||||
GEN_AI_OPERATION_NAME,
|
||||
GEN_AI_OUTPUT_MESSAGES,
|
||||
SERVICE_DESCRIPTION,
|
||||
SERVICE_NAME,
|
||||
} from './constants.js';
|
||||
import { sessionId } from '../utils/session.js';
|
||||
|
||||
// Name and version under which this module registers its tracer with the
// OpenTelemetry API (trace.getTracer(TRACER_NAME, TRACER_VERSION)).
const TRACER_NAME = 'gemini-cli';
const TRACER_VERSION = 'v1';
|
||||
@@ -51,7 +63,7 @@ export interface SpanMetadata {
|
||||
* @returns The result of the function.
|
||||
*/
|
||||
export async function runInDevTraceSpan<R>(
|
||||
opts: SpanOptions & { name: string; noAutoEnd?: boolean },
|
||||
opts: SpanOptions & { operation: GeminiCliOperation; noAutoEnd?: boolean },
|
||||
fn: ({
|
||||
metadata,
|
||||
}: {
|
||||
@@ -59,33 +71,32 @@ export async function runInDevTraceSpan<R>(
|
||||
endSpan: () => void;
|
||||
}) => Promise<R>,
|
||||
): Promise<R> {
|
||||
const { name: spanName, noAutoEnd, ...restOfSpanOpts } = opts;
|
||||
if (process.env['GEMINI_DEV_TRACING'] !== 'true') {
|
||||
// If GEMINI_DEV_TRACING env var not set, we do not trace.
|
||||
return fn({
|
||||
metadata: {
|
||||
name: spanName,
|
||||
attributes: {},
|
||||
},
|
||||
endSpan: () => {
|
||||
// noop
|
||||
},
|
||||
});
|
||||
}
|
||||
const { operation, noAutoEnd, ...restOfSpanOpts } = opts;
|
||||
|
||||
const tracer = trace.getTracer(TRACER_NAME, TRACER_VERSION);
|
||||
return tracer.startActiveSpan(opts.name, restOfSpanOpts, async (span) => {
|
||||
return tracer.startActiveSpan(operation, restOfSpanOpts, async (span) => {
|
||||
const meta: SpanMetadata = {
|
||||
name: spanName,
|
||||
attributes: {},
|
||||
name: operation,
|
||||
attributes: {
|
||||
[GEN_AI_OPERATION_NAME]: operation,
|
||||
[GEN_AI_AGENT_NAME]: SERVICE_NAME,
|
||||
[GEN_AI_AGENT_DESCRIPTION]: SERVICE_DESCRIPTION,
|
||||
[GEN_AI_CONVERSATION_ID]: sessionId,
|
||||
},
|
||||
};
|
||||
const endSpan = () => {
|
||||
try {
|
||||
if (meta.input !== undefined) {
|
||||
span.setAttribute('input-json', safeJsonStringify(meta.input));
|
||||
span.setAttribute(
|
||||
GEN_AI_INPUT_MESSAGES,
|
||||
safeJsonStringify(meta.input),
|
||||
);
|
||||
}
|
||||
if (meta.output !== undefined) {
|
||||
span.setAttribute('output-json', safeJsonStringify(meta.output));
|
||||
span.setAttribute(
|
||||
GEN_AI_OUTPUT_MESSAGES,
|
||||
safeJsonStringify(meta.output),
|
||||
);
|
||||
}
|
||||
for (const [key, value] of Object.entries(meta.attributes)) {
|
||||
span.setAttribute(key, value);
|
||||
|
||||
Reference in New Issue
Block a user