This commit is contained in:
Your Name
2026-05-14 03:54:59 +00:00
parent cc3b17a32f
commit a31aa6094f
22 changed files with 433 additions and 61 deletions
+232
View File
@@ -0,0 +1,232 @@
/**
* @license
* Copyright 2026 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { TestRig } from './test-helper.js';
import * as path from 'node:path';
import * as fs from 'node:fs';
import { FinishReason, GenerateContentResponse } from '@google/genai';
import type { FakeResponse, HistoryTurn } from '@google/gemini-cli-core';
/**
 * E2E fidelity test: the rendered context working buffer must be reproduced
 * exactly across process exit + `--resume`. Every turn keeps its ID and
 * content, and synthetic turns (GC summaries, whose IDs come from
 * deriveStableId and are 32-char hex) must be byte-stable between runs.
 */
describe('Context Management Fidelity E2E', () => {
  let rig: TestRig;
  beforeEach(() => {
    rig = new TestRig();
  });
  afterEach(async () => await rig.cleanup());

  it('should reproduce the exact context working buffer on resume', async () => {
    // Mock responses to trigger GC (summarization)
    const snapshotResponse: FakeResponse = {
      method: 'generateContent',
      response: {
        candidates: [
          {
            content: {
              parts: [
                {
                  text: JSON.stringify({
                    new_facts: ['GC Triggered.'],
                    new_constraints: [],
                    new_tasks: [],
                    resolved_task_ids: [],
                    obsolete_fact_indices: [],
                    obsolete_constraint_indices: [],
                    chronological_summary: 'Snapshot created.',
                  }),
                },
              ],
              role: 'model',
            },
            finishReason: FinishReason.STOP,
            index: 0,
          },
        ],
      } as unknown as GenerateContentResponse,
    };
    // Inflated token count so the (stress-lowered) GC thresholds are exceeded.
    const countTokensResponse: FakeResponse = {
      method: 'countTokens',
      response: { totalTokens: 50000 },
    };
    // Builds a single-chunk streamed model reply with the given text.
    const streamResponse = (text: string): FakeResponse => ({
      method: 'generateContentStream',
      response: [
        {
          candidates: [
            {
              content: { parts: [{ text }], role: 'model' },
              finishReason: FinishReason.STOP,
              index: 0,
            },
          ],
        },
      ] as unknown as GenerateContentResponse[],
    });
    // Writes mocks as newline-delimited JSON; the CLI consumes this file via
    // the --fake-responses-non-strict flag.
    const setupResponses = (fileName: string, mocks: FakeResponse[]) => {
      const filePath = path.join(rig.testDir!, fileName);
      fs.writeFileSync(
        filePath,
        mocks.map((m) => JSON.stringify(m)).join('\n'),
      );
      return filePath;
    };
    await rig.setup('context-fidelity', {
      settings: {
        experimental: {
          stressTestProfile: true, // Lowers thresholds to trigger GC easily
        },
      },
    });
    const massivePayload = 'X'.repeat(50000);
    const traceDir = path.join(rig.testDir!, 'traces');
    fs.mkdirSync(traceDir, { recursive: true });
    const traceLog = path.join(traceDir, 'trace.log');
    const commonEnv = {
      GEMINI_API_KEY: 'mock-key',
      GEMINI_CONTEXT_TRACE_DIR: traceDir,
      GEMINI_CONTEXT_TRACE_ENABLED: 'true',
      GEMINI_DEBUG_LOG_FILE: path.join(rig.testDir!, 'debug.log'),
    };
    // Plenty of snapshot/countTokens pairs so GC can run repeatedly per turn.
    const runMocks: FakeResponse[] = [
      streamResponse('Ack 1'),
      streamResponse('Ack 2'),
      streamResponse('Ack 3'),
      streamResponse('Ack 4'),
      streamResponse('Ack 5'),
    ];
    for (let i = 0; i < 50; i++) {
      runMocks.push(snapshotResponse);
      runMocks.push(countTokensResponse);
    }
    // Turn 1: Initial massive payload to put pressure
    await rig.run({
      args: [
        '--debug',
        '--fake-responses-non-strict',
        setupResponses('resp1.json', runMocks),
      ],
      stdin: 'Turn 1: ' + massivePayload,
      env: commonEnv,
    });
    // Turn 2: Another turn, resuming Turn 1
    await rig.run({
      args: [
        '--debug',
        '--resume',
        'latest',
        '--fake-responses-non-strict',
        setupResponses('resp2.json', runMocks),
      ],
      stdin: 'Turn 2: ' + massivePayload,
      env: commonEnv,
    });
    // Turn 3: Third turn to force GC, resuming Turn 2
    await rig.run({
      args: [
        '--debug',
        '--resume',
        'latest',
        '--fake-responses-non-strict',
        setupResponses('resp3.json', runMocks),
      ],
      stdin: 'Turn 3: ' + massivePayload,
      env: commonEnv,
    });
    // Extracts the most recent rendered-context payload from the trace log.
    // Large payloads are externalized as `$asset` files under traces/assets/;
    // returns null when no render line is present.
    const getRenderedContext = (logContent: string): HistoryTurn[] | null => {
      const lines = logContent.split('\n');
      const renderLines = lines.filter(
        (l) =>
          l.includes('[Render] Render Sanitized Context for LLM') ||
          l.includes('[Render] Render Context for LLM'),
      );
      if (renderLines.length === 0) return null;
      const lastRender = renderLines[renderLines.length - 1];
      const detailsMatch = lastRender.match(/\| Details: (.*)$/);
      if (!detailsMatch) return null;
      const details = JSON.parse(detailsMatch[1]);
      const assetInfo =
        details.renderedContextSanitized || details.renderedContext;
      if (assetInfo && assetInfo.$asset) {
        const assetPath = path.join(traceDir, 'assets', assetInfo.$asset);
        return JSON.parse(fs.readFileSync(assetPath, 'utf-8'));
      }
      return assetInfo;
    };
    const log1 = fs.readFileSync(traceLog, 'utf-8');
    const contextBeforeExit = getRenderedContext(log1);
    // getRenderedContext returns null (not undefined) on failure;
    // toBeDefined() would pass for null and the test would then die with an
    // opaque TypeError on .slice(), so assert non-null explicitly.
    expect(contextBeforeExit).not.toBeNull();
    console.log(
      'Context Before Exit (First 2 turns):',
      JSON.stringify(contextBeforeExit!.slice(0, 2), null, 2),
    );
    // Turn 4: Resume and run a small command
    await rig.run({
      args: [
        '--debug',
        '--resume',
        'latest',
        '--fake-responses-non-strict',
        setupResponses('resp4.json', runMocks),
        'continue',
      ],
      env: commonEnv,
    });
    const log2 = fs.readFileSync(traceLog, 'utf-8');
    const contextAfterResume = getRenderedContext(log2);
    expect(contextAfterResume).not.toBeNull();
    console.log(
      'Context After Resume (First 2 turns):',
      JSON.stringify(contextAfterResume!.slice(0, 2), null, 2),
    );
    // The resumed buffer may have grown (Turn 4 appended), but must contain
    // the pre-exit buffer as an identical prefix.
    expect(contextAfterResume!.length).toBeGreaterThanOrEqual(
      contextBeforeExit!.length,
    );
    for (let i = 0; i < contextBeforeExit!.length; i++) {
      expect(contextAfterResume![i].id).toBe(contextBeforeExit![i].id);
      expect(contextAfterResume![i].content).toEqual(
        contextBeforeExit![i].content,
      );
    }
    // Most importantly, synthetic IDs (like summaries) must be stable.
    const syntheticTurns = contextBeforeExit!.filter(
      (t: HistoryTurn) => t.id && t.id.length === 32,
    ); // deriveStableId produces 32-char hex
    expect(syntheticTurns.length).toBeGreaterThan(0);
    const syntheticTurnsAfter = contextAfterResume!.filter(
      (t: HistoryTurn) => t.id && t.id.length === 32,
    );
    expect(syntheticTurnsAfter.length).toBeGreaterThanOrEqual(
      syntheticTurns.length,
    );
    // Check if the first synthetic turn is identical
    expect(syntheticTurnsAfter[0].id).toBe(syntheticTurns[0].id);
    expect(syntheticTurnsAfter[0].content).toEqual(syntheticTurns[0].content);
  });
});
@@ -9,16 +9,21 @@ import {
supersedeStaleSnapshots,
SNAPSHOT_SUPERSEDED_PLACEHOLDER,
} from './snapshotSuperseder.js';
import type { GeminiChat } from '../../core/geminiChat.js';
import type { GeminiChat, HistoryTurn } from '../../core/geminiChat.js';
import type { Content } from '@google/genai';
import { randomUUID } from 'node:crypto';
/** Builds a minimal mock GeminiChat around a mutable history array. */
function createMockChat(history: Content[]): GeminiChat {
const getTurns = () => history.map((c) => ({ id: randomUUID(), content: c }));
return {
getHistory: vi.fn(() => [...history]),
setHistory: vi.fn((newHistory: readonly Content[]) => {
getHistoryTurns: vi.fn(() => getTurns()),
setHistory: vi.fn((newHistory: ReadonlyArray<Content | HistoryTurn>) => {
history.length = 0;
history.push(...newHistory);
for (const item of newHistory) {
history.push('content' in item ? item.content : item);
}
}),
} as unknown as GeminiChat;
}
@@ -762,13 +762,13 @@ describe('LocalAgentExecutor', () => {
const firstPart =
'content' in history[0]
? history[0].content.parts?.[0]
: (history[0] as Content).parts?.[0];
: history[0].parts?.[0];
expect(firstPart?.text).toBe('Goal: TestGoal');
const secondPart =
'content' in history[1]
? history[1].content.parts?.[0]
: (history[1] as Content).parts?.[0];
: history[1].parts?.[0];
expect(secondPart?.text).toBe('OK, starting on TestGoal.');
});
@@ -15,7 +15,6 @@ import {
type FunctionCall,
type FunctionDeclaration,
} from '@google/genai';
import { randomUUID } from 'node:crypto';
import { ToolRegistry } from '../tools/tool-registry.js';
import { PromptRegistry } from '../prompts/prompt-registry.js';
import { ResourceRegistry } from '../resources/resource-registry.js';
@@ -33,8 +33,14 @@ describe('ContextManager Sync Pressure Barrier Tests', () => {
// 2. Add System Prompt (Episode 0 - Protected)
chatHistory.set([
{ id: 'h1', content: { role: 'user', parts: [{ text: 'System prompt' }] } },
{ id: 'h2', content: { role: 'model', parts: [{ text: 'Understood.' }] } },
{
id: 'h1',
content: { role: 'user', parts: [{ text: 'System prompt' }] },
},
{
id: 'h2',
content: { role: 'model', parts: [{ text: 'Understood.' }] },
},
]);
// 3. Add massive history that blows past the 150k maxTokens limit
@@ -47,7 +47,9 @@ describe('ContextManager - Hot Start Calibration', () => {
const emitGroundTruthSpy = vi.spyOn(env.eventBus, 'emitTokenGroundTruth');
// Add a node to make the buffer non-empty
chatHistory.set([{ id: 'h1', content: { role: 'user', parts: [{ text: 'Hello' }] } }]);
chatHistory.set([
{ id: 'h1', content: { role: 'user', parts: [{ text: 'Hello' }] } },
]);
// First render should trigger calibration
await contextManager.renderHistory();
@@ -81,7 +83,9 @@ describe('ContextManager - Hot Start Calibration', () => {
);
// Add a node
chatHistory.set([{ id: 'h1', content: { role: 'user', parts: [{ text: 'Hello' }] } }]);
chatHistory.set([
{ id: 'h1', content: { role: 'user', parts: [{ text: 'Hello' }] } },
]);
// Render should succeed without throwing
const result = await contextManager.renderHistory();
+7 -7
View File
@@ -5,7 +5,10 @@
*/
import type { Content } from '@google/genai';
import type { AgentChatHistory, HistoryTurn } from '../core/agentChatHistory.js';
import type {
AgentChatHistory,
HistoryTurn,
} from '../core/agentChatHistory.js';
import { isToolExecution, type ConcreteNode } from './graph/types.js';
import type { ContextEventBus } from './eventBus.js';
import type { ContextTracer } from './tracer.js';
@@ -400,12 +403,9 @@ export class ContextManager {
this.tracer.logEvent('ContextManager', 'Finished rendering');
const hardenedHistory = hardenHistory(
renderedHistory,
{
sentinels: this.sidecar.sentinels,
},
);
const hardenedHistory = hardenHistory(renderedHistory, {
sentinels: this.sidecar.sentinels,
});
const apiHistory = hardenedHistory.map((h) => h.content);
if (header) {
+10 -2
View File
@@ -99,7 +99,11 @@ export async function render(
tracer.logEvent('Render', 'Render Context for LLM', {
renderedContext: contents,
});
performCalibration(env, visibleNodes, contents.map(h => h.content));
performCalibration(
env,
visibleNodes,
contents.map((h) => h.content),
);
return {
history: contents,
didApplyManagement: false,
@@ -153,7 +157,11 @@ export async function render(
tracer.logEvent('Render', 'Render Sanitized Context for LLM', {
renderedContextSanitized: contents,
});
performCalibration(env, visibleNodes, contents.map(h => h.content));
performCalibration(
env,
visibleNodes,
contents.map((h) => h.content),
);
return {
history: contents,
didApplyManagement: true,
@@ -56,7 +56,10 @@ describe('ContextGraphBuilder', () => {
const complexHistory: HistoryTurn[] = [
{
id: 'turn-1',
content: { role: 'user', parts: [{ text: 'Step 1: complex analysis' }] },
content: {
role: 'user',
parts: [{ text: 'Step 1: complex analysis' }],
},
},
{
id: 'turn-2',
+4 -4
View File
@@ -209,9 +209,9 @@ export class ContextGraphBuilder {
typeof part.functionCall.id === 'string'
? part.functionCall.id
: undefined;
// Use stable API ID if available, otherwise anchor to the turn and index.
const id = apiId
const id = apiId
? `${apiId}_${turnSalt}_${partIdx}`
: `${turnSalt}_${partIdx}`;
@@ -235,8 +235,8 @@ export class ContextGraphBuilder {
isFunctionCallPart(part) && typeof part.functionCall.id === 'string'
? part.functionCall.id
: undefined;
const id = apiId
const id = apiId
? `${apiId}_${turnSalt}_${partIdx}`
: `${turnSalt}_${partIdx}`;
@@ -2,7 +2,7 @@
exports[`System Lifecycle Golden Tests > Scenario 1: Organic Growth with Huge Tool Output & Images 1`] = `
{
"baseUnits": 787,
"baseUnits": 765,
"finalProjection": [
{
"content": {
@@ -176,18 +176,18 @@ exports[`System Lifecycle Golden Tests > Scenario 1: Organic Growth with Huge To
"turnIndex": 1,
},
{
"tokensAfterBackground": 497,
"tokensAfterBackground": 493,
"tokensBeforeBackground": 20232,
"turnIndex": 2,
},
{
"tokensAfterBackground": 750,
"tokensBeforeBackground": 3554,
"tokensAfterBackground": 728,
"tokensBeforeBackground": 3550,
"turnIndex": 3,
},
{
"tokensAfterBackground": 787,
"tokensBeforeBackground": 787,
"tokensAfterBackground": 765,
"tokensBeforeBackground": 765,
"turnIndex": 4,
},
],
@@ -387,7 +387,7 @@ exports[`System Lifecycle Golden Tests > Scenario 4: Async-Driven Background GC
],
"role": "user",
},
"id": "2371dc698715d731086209ad329ea7c9",
"id": "<UUID>",
},
{
"content": {
@@ -27,9 +27,13 @@ describe('Context Manager Hysteresis Tests', () => {
},
});
const getProjectionTokens = (proj: HistoryTurn[], harness: SimulationHarness) =>
const getProjectionTokens = (
proj: HistoryTurn[],
harness: SimulationHarness,
) =>
proj.reduce(
(sum, c) => sum + harness.env.tokenCalculator.calculateContentTokens(c.content),
(sum, c) =>
sum + harness.env.tokenCalculator.calculateContentTokens(c.content),
0,
);
@@ -17,6 +17,7 @@ expect.addSnapshotSerializer({
(/[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}/i.test(
val,
) ||
/^[0-9a-f]{32}$/i.test(val) ||
/[\\/]tmp[\\/]sim/.test(val)),
print: (val) => {
if (typeof val !== 'string') return `"${val}"`;
@@ -25,6 +26,7 @@ expect.addSnapshotSerializer({
/[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}/gi,
'<UUID>',
)
.replace(/\b[0-9a-f]{32}\b/gi, '<UUID>')
.replace(/[\\/]tmp[\\/]sim[^\s"'\]]*/g, '<MOCKED_DIR>');
// Also scrub timestamps in filenames like blob_1234567890_...
@@ -94,9 +94,9 @@ export class SimulationHarness {
this.chatHistory,
calculator,
);
}
}
async simulateTurn(messages: Content[]) {
async simulateTurn(messages: Content[]) {
// 1. Append the new messages
const currentHistory = this.chatHistory.get();
const turns = messages.map((m) => ({ id: randomUUID(), content: m }));
@@ -104,4 +104,3 @@ export class AgentChatHistory {
return this.history.length;
}
}
+1 -1
View File
@@ -295,7 +295,7 @@ export class GeminiClient {
this.getChat().stripThoughtsFromHistory();
}
setHistory(history: readonly (Content | HistoryTurn)[]) {
setHistory(history: ReadonlyArray<Content | HistoryTurn>) {
this.getChat().setHistory(history);
this.updateTelemetryTokenCount();
this.forceFullIdeContext = true;
+17 -16
View File
@@ -162,7 +162,7 @@ function isValidContent(content: Content): boolean {
* @throws Error if the history does not start with a user turn.
* @throws Error if the history contains an invalid role.
*/
function validateHistory(history: (Content | HistoryTurn)[]) {
function validateHistory(history: Array<Content | HistoryTurn>) {
for (const item of history) {
const content = 'content' in item ? item.content : item;
if (content.role !== 'user' && content.role !== 'model') {
@@ -182,10 +182,7 @@ function validateHistory(history: (Content | HistoryTurn)[]) {
function extractCuratedHistory(
comprehensiveHistory: readonly HistoryTurn[],
): HistoryTurn[] {
if (
comprehensiveHistory === undefined ||
comprehensiveHistory.length === 0
) {
if (comprehensiveHistory === undefined || comprehensiveHistory.length === 0) {
return [];
}
const curatedHistory: HistoryTurn[] = [];
@@ -198,10 +195,7 @@ function extractCuratedHistory(
} else {
const modelOutput: HistoryTurn[] = [];
let isValid = true;
while (
i < length &&
comprehensiveHistory[i].content.role === 'model'
) {
while (i < length && comprehensiveHistory[i].content.role === 'model') {
modelOutput.push(comprehensiveHistory[i]);
if (isValid && !isValidContent(comprehensiveHistory[i].content)) {
isValid = false;
@@ -284,7 +278,7 @@ export class GeminiChat {
readonly context: AgentLoopContext,
private systemInstruction: string = '',
private tools: Tool[] = [],
history: (Content | HistoryTurn)[] = [],
history: Array<Content | HistoryTurn> = [],
resumedSessionData?: ResumedSessionData,
private readonly onModelChanged?: (modelId: string) => Promise<Tool[]>,
) {
@@ -297,8 +291,9 @@ export class GeminiChat {
content: {
role: m.type === 'user' ? 'user' : 'model',
parts: Array.isArray(m.content)
? (m.content as Part[])
: [{ text: m.content as string }],
? // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion
(m.content as Part[])
: [{ text: String(m.content) }],
},
}))
: history.map((item) =>
@@ -325,7 +320,9 @@ export class GeminiChat {
await this.chatRecordingService.initialize(resumedSessionData, kind);
// Sync initial history with the recorder to ensure all turns (even bootstrapped ones)
// are durable and coordinated.
this.chatRecordingService.updateMessagesFromHistory(this.agentHistory.get());
this.chatRecordingService.updateMessagesFromHistory(
this.agentHistory.get(),
);
}
setSystemInstruction(sysInstr: string) {
@@ -598,7 +595,9 @@ export class GeminiChat {
return streamWithRetries.call(this);
}
private extractBinaryInjections(parts: Part[] | undefined): Part[] | undefined {
private extractBinaryInjections(
parts: Part[] | undefined,
): Part[] | undefined {
const binaryParts: Part[] = [];
if (parts) {
for (const part of parts) {
@@ -925,7 +924,7 @@ export class GeminiChat {
}
setHistory(
history: readonly (Content | HistoryTurn)[],
history: ReadonlyArray<Content | HistoryTurn>,
options: { silent?: boolean } = {},
): void {
const wrappedHistory: HistoryTurn[] = history.map((item) => {
@@ -942,7 +941,9 @@ export class GeminiChat {
this.lastPromptTokenCount = estimateTokenCountSync(
this.agentHistory.flatMap((c) => c.content.parts || []),
);
this.chatRecordingService.updateMessagesFromHistory(this.agentHistory.get());
this.chatRecordingService.updateMessagesFromHistory(
this.agentHistory.get(),
);
}
stripThoughtsFromHistory(): void {
@@ -1342,4 +1342,69 @@ describe('ChatRecordingService', () => {
mkdirSyncSpy.mockRestore();
});
});
// Covers durable-identity guarantees of the recording service: synthetic
// messages keep their (generated or caller-supplied) IDs, and history sync
// preserves turn IDs across rewrites such as GC summarization.
describe('recordSyntheticMessage and history sync', () => {
  it('should correctly record synthetic messages with durable IDs', async () => {
    await chatRecordingService.initialize(undefined, 'main');
    const parts = [{ text: 'Synthetic Turn' }];
    // Implicit ID generation
    const id1 = chatRecordingService.recordSyntheticMessage('user', parts);
    expect(id1).toBeDefined();
    // Generated IDs come from the mocked UUID factory in this suite.
    expect(id1).toMatch(/test-uuid-/);
    // Explicit ID registration (e.g. from context processor)
    const customId = 'stable-hash-123';
    const id2 = chatRecordingService.recordSyntheticMessage(
      'gemini',
      parts,
      customId,
    );
    // The caller-supplied ID is returned verbatim, not replaced.
    expect(id2).toBe(customId);
    // Reload the on-disk record to confirm both messages were persisted
    // with the expected IDs and types.
    const record = await loadConversationRecord(
      chatRecordingService.getConversationFilePath()!,
    );
    expect(record!.messages).toHaveLength(2);
    expect(record!.messages[0].id).toBe(id1);
    expect(record!.messages[0].type).toBe('user');
    expect(record!.messages[1].id).toBe(customId);
    expect(record!.messages[1].type).toBe('gemini');
  });
  it('should synchronize history turns and maintain their durable identity', async () => {
    await chatRecordingService.initialize(undefined, 'main');
    const history: HistoryTurn[] = [
      { id: 'h1', content: { role: 'user', parts: [{ text: 'msg1' }] } },
      { id: 'h2', content: { role: 'model', parts: [{ text: 'msg2' }] } },
    ];
    chatRecordingService.updateMessagesFromHistory(history);
    const record = await loadConversationRecord(
      chatRecordingService.getConversationFilePath()!,
    );
    // Initial sync: both turns persisted under their history IDs.
    expect(record!.messages).toHaveLength(2);
    expect(record!.messages[0].id).toBe('h1');
    expect(record!.messages[1].id).toBe('h2');
    // Update with a summary
    const summaryId = 'summary-123';
    const updatedHistory: HistoryTurn[] = [
      {
        id: summaryId,
        content: { role: 'user', parts: [{ text: 'summary' }] },
      },
      ...history.slice(1),
    ];
    chatRecordingService.updateMessagesFromHistory(updatedHistory);
    const record2 = await loadConversationRecord(
      chatRecordingService.getConversationFilePath()!,
    );
    // The summary replaces h1 under its own ID; the surviving turn keeps h2.
    expect(record2!.messages).toHaveLength(2);
    expect(record2!.messages[0].id).toBe(summaryId);
    expect(record2!.messages[1].id).toBe('h2');
  });
});
});
@@ -514,7 +514,8 @@ export class ChatRecordingService {
displayContent?: PartListUnion;
id?: string;
}): string {
if (!this.conversationFile || !this.cachedConversation) return message.id || randomUUID();
if (!this.conversationFile || !this.cachedConversation)
return message.id || randomUUID();
try {
const msg = this.newMessage(
@@ -938,13 +939,16 @@ export class ChatRecordingService {
(m) =>
m.type === 'gemini' &&
m.toolCalls?.some((tc) => tc.id === callId),
) as MessageRecord & { type: 'gemini' };
if (geminiMsg) {
);
if (geminiMsg && geminiMsg.type === 'gemini') {
const tc = geminiMsg.toolCalls!.find((tc) => tc.id === callId);
if (tc) {
// If the history version is different (e.g. masked), sync it into the record
// We sync the entire parts array of the user turn to ensure sibling parts are preserved
if (JSON.stringify(tc.result) !== JSON.stringify(turn.content.parts)) {
if (
JSON.stringify(tc.result) !==
JSON.stringify(turn.content.parts)
) {
tc.result = turn.content.parts || [];
updated = true;
}
@@ -954,7 +958,10 @@ export class ChatRecordingService {
}
}
if (updated || newMessages.length !== this.cachedConversation.messages.length) {
if (
updated ||
newMessages.length !== this.cachedConversation.messages.length
) {
this.cachedConversation.messages = newMessages;
this.updateMetadata({
messages: newMessages,
@@ -86,6 +86,9 @@ export type ConversationRecordExtra =
*/
export type MessageRecord = BaseMessageRecord & ConversationRecordExtra;
/**
* Complete conversation record stored in session files.
*/
export interface ConversationRecord {
sessionId: string;
projectHash: string;
@@ -99,6 +102,7 @@ export interface ConversationRecord {
/** The kind of conversation (main agent or subagent) */
kind?: 'main' | 'subagent';
}
/**
* Data structure for resuming an existing session.
*/
@@ -0,0 +1,35 @@
/**
* @license
* Copyright 2026 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect } from 'vitest';
import { deriveStableId } from './cryptoUtils.js';
/**
 * Unit tests for deriveStableId: an order-insensitive, deterministic hash
 * over a list of strings, rendered as 32 lowercase hex characters.
 */
describe('cryptoUtils', () => {
  describe('deriveStableId', () => {
    // Shape shared by every derived ID.
    const hex32 = /^[0-9a-f]{32}$/;

    it('should be deterministic regardless of input order', () => {
      const forward = deriveStableId(['a', 'b', 'c']);
      const reversed = deriveStableId(['c', 'b', 'a']);
      expect(forward).toBe(reversed);
      expect(forward).toMatch(hex32);
    });

    it('should produce different IDs for different inputs', () => {
      const base = deriveStableId(['a', 'b', 'c']);
      const variant = deriveStableId(['a', 'b', 'd']);
      expect(base).not.toBe(variant);
    });

    it('should handle single inputs', () => {
      expect(deriveStableId(['only-one'])).toMatch(hex32);
    });

    it('should be consistent across calls with same data', () => {
      const input = ['id-123', 'id-456'];
      expect(deriveStableId(input)).toBe(deriveStableId(input));
    });
  });
});
+1 -3
View File
@@ -308,9 +308,7 @@ function enforceRoleConstraints(
* Deep-scrubs the history to remove any non-standard properties from Content and Part objects.
* This ensures compatibility with strict APIs (like Vertex AI) that reject unknown fields.
*/
export function scrubHistory(
history: HistoryTurn[],
): HistoryTurn[] {
export function scrubHistory(history: HistoryTurn[]): HistoryTurn[] {
return history.map((turn) => ({
id: turn.id,
content: {