Fix bugs where Rewind and Resume displayed ugly and excessively verbose (100x) content. (#17940)

This commit is contained in:
Jacob Richman
2026-01-30 10:09:27 -08:00
committed by GitHub
parent f14d0c6a17
commit bb6a336ca9
16 changed files with 212 additions and 20 deletions

View File

@@ -890,6 +890,7 @@ ${JSON.stringify(
{ model: 'default-routed-model' },
initialRequest,
expect.any(AbortSignal),
undefined,
);
});
@@ -1707,6 +1708,7 @@ ${JSON.stringify(
{ model: 'routed-model' },
[{ text: 'Hi' }],
expect.any(AbortSignal),
undefined,
);
});
@@ -1724,6 +1726,7 @@ ${JSON.stringify(
{ model: 'routed-model' },
[{ text: 'Hi' }],
expect.any(AbortSignal),
undefined,
);
// Second turn
@@ -1741,6 +1744,7 @@ ${JSON.stringify(
{ model: 'routed-model' },
[{ text: 'Continue' }],
expect.any(AbortSignal),
undefined,
);
});
@@ -1758,6 +1762,7 @@ ${JSON.stringify(
{ model: 'routed-model' },
[{ text: 'Hi' }],
expect.any(AbortSignal),
undefined,
);
// New prompt
@@ -1779,6 +1784,7 @@ ${JSON.stringify(
{ model: 'new-routed-model' },
[{ text: 'A new topic' }],
expect.any(AbortSignal),
undefined,
);
});
@@ -1806,6 +1812,7 @@ ${JSON.stringify(
{ model: 'original-model' },
[{ text: 'Hi' }],
expect.any(AbortSignal),
undefined,
);
mockRouterService.route.mockResolvedValue({
@@ -1828,6 +1835,7 @@ ${JSON.stringify(
{ model: 'fallback-model' },
[{ text: 'Continue' }],
expect.any(AbortSignal),
undefined,
);
});
});
@@ -1912,6 +1920,7 @@ ${JSON.stringify(
{ model: 'default-routed-model' },
initialRequest,
expect.any(AbortSignal),
undefined,
);
// Second call with "Please continue."
@@ -1920,6 +1929,7 @@ ${JSON.stringify(
{ model: 'default-routed-model' },
[{ text: 'System: Please continue.' }],
expect.any(AbortSignal),
undefined,
);
});
@@ -2332,6 +2342,7 @@ ${JSON.stringify(
expect.objectContaining({ model: 'model-a' }),
expect.anything(),
expect.anything(),
undefined,
);
});
@@ -3183,6 +3194,7 @@ ${JSON.stringify(
expect.anything(),
[{ text: 'Please explain' }],
expect.anything(),
undefined,
);
});

View File

@@ -532,6 +532,7 @@ export class GeminiClient {
prompt_id: string,
boundedTurns: number,
isInvalidStreamRetry: boolean,
displayContent?: PartListUnion,
): AsyncGenerator<ServerGeminiStreamEvent, Turn> {
// Re-initialize turn (it was empty before if in loop, or new instance)
let turn = new Turn(this.getChat(), prompt_id);
@@ -647,7 +648,12 @@ export class GeminiClient {
yield { type: GeminiEventType.ModelInfo, value: modelToUse };
}
this.currentSequenceModel = modelToUse;
const resultStream = turn.run(modelConfigKey, request, linkedSignal);
const resultStream = turn.run(
modelConfigKey,
request,
linkedSignal,
displayContent,
);
let isError = false;
let isInvalidStream = false;
@@ -708,6 +714,7 @@ export class GeminiClient {
prompt_id,
boundedTurns - 1,
true,
displayContent,
);
return turn;
}
@@ -739,7 +746,8 @@ export class GeminiClient {
signal,
prompt_id,
boundedTurns - 1,
// isInvalidStreamRetry is false
false, // isInvalidStreamRetry is false
displayContent,
);
return turn;
}
@@ -754,6 +762,7 @@ export class GeminiClient {
prompt_id: string,
turns: number = MAX_TURNS,
isInvalidStreamRetry: boolean = false,
displayContent?: PartListUnion,
): AsyncGenerator<ServerGeminiStreamEvent, Turn> {
if (!isInvalidStreamRetry) {
this.config.resetTurn();
@@ -809,6 +818,7 @@ export class GeminiClient {
prompt_id,
boundedTurns,
isInvalidStreamRetry,
displayContent,
);
// Fire AfterAgent hook if we have a turn and no pending tools
@@ -860,6 +870,8 @@ export class GeminiClient {
signal,
prompt_id,
boundedTurns - 1,
false,
displayContent,
);
}
}

View File

@@ -268,6 +268,7 @@ export class GeminiChat {
* @param message - The list of messages to send.
* @param prompt_id - The ID of the prompt.
* @param signal - An abort signal for this message.
* @param displayContent - An optional user-friendly version of the message to record.
* @return The model's response.
*
* @example
@@ -286,6 +287,7 @@ export class GeminiChat {
message: PartListUnion,
prompt_id: string,
signal: AbortSignal,
displayContent?: PartListUnion,
): Promise<AsyncGenerator<StreamEvent>> {
await this.sendPromise;
@@ -302,12 +304,25 @@ export class GeminiChat {
// Record user input - capture complete message with all parts (text, files, images, etc.)
// but skip recording function responses (tool call results) as they should be stored in tool call records
if (!isFunctionResponse(userContent)) {
const userMessage = Array.isArray(message) ? message : [message];
const userMessageContent = partListUnionToString(toParts(userMessage));
const userMessageParts = userContent.parts || [];
const userMessageContent = partListUnionToString(userMessageParts);
let finalDisplayContent: Part[] | undefined = undefined;
if (displayContent !== undefined) {
const displayParts = toParts(
Array.isArray(displayContent) ? displayContent : [displayContent],
);
const displayContentString = partListUnionToString(displayParts);
if (displayContentString !== userMessageContent) {
finalDisplayContent = displayParts;
}
}
this.chatRecordingService.recordMessage({
model,
type: 'user',
content: userMessageContent,
content: userMessageParts,
displayContent: finalDisplayContent,
});
}

View File

@@ -102,6 +102,7 @@ describe('Turn', () => {
reqParts,
'prompt-id-1',
expect.any(AbortSignal),
undefined,
);
expect(events).toEqual([

View File

@@ -248,6 +248,7 @@ export class Turn {
modelConfigKey: ModelConfigKey,
req: PartListUnion,
signal: AbortSignal,
displayContent?: PartListUnion,
): AsyncGenerator<ServerGeminiStreamEvent> {
try {
// Note: This assumes `sendMessageStream` yields events like
@@ -257,6 +258,7 @@ export class Turn {
req,
this.prompt_id,
signal,
displayContent,
);
for await (const streamEvent of responseStream) {

View File

@@ -130,6 +130,7 @@ describe('ChatRecordingService', () => {
chatRecordingService.recordMessage({
type: 'user',
content: 'Hello',
displayContent: 'User Hello',
model: 'gemini-pro',
});
expect(mkdirSyncSpy).toHaveBeenCalled();
@@ -139,6 +140,7 @@ describe('ChatRecordingService', () => {
) as ConversationRecord;
expect(conversation.messages).toHaveLength(1);
expect(conversation.messages[0].content).toBe('Hello');
expect(conversation.messages[0].displayContent).toBe('User Hello');
expect(conversation.messages[0].type).toBe('user');
});

View File

@@ -47,6 +47,7 @@ export interface BaseMessageRecord {
id: string;
timestamp: string;
content: PartListUnion;
displayContent?: PartListUnion;
}
/**
@@ -207,12 +208,14 @@ export class ChatRecordingService {
private newMessage(
type: ConversationRecordExtra['type'],
content: PartListUnion,
displayContent?: PartListUnion,
): MessageRecord {
return {
id: randomUUID(),
timestamp: new Date().toISOString(),
type,
content,
displayContent,
};
}
@@ -223,12 +226,17 @@ export class ChatRecordingService {
model: string | undefined;
type: ConversationRecordExtra['type'];
content: PartListUnion;
displayContent?: PartListUnion;
}): void {
if (!this.conversationFile) return;
try {
this.updateConversation((conversation) => {
const msg = this.newMessage(message.type, message.content);
const msg = this.newMessage(
message.type,
message.content,
message.displayContent,
);
if (msg.type === 'gemini') {
// If it's a new Gemini message then incorporate any queued thoughts.
conversation.messages.push({