fix(core): flush transcript for pure tool-call responses to ensure BeforeTool hooks see complete state (#20419)

Co-authored-by: Bryan Morgan <bryanmorgan@google.com>
This commit is contained in:
krishdef7
2026-02-27 04:09:36 +05:30
committed by GitHub
parent edb1fdea30
commit f700c923d9
2 changed files with 59 additions and 2 deletions

View File

@@ -1032,6 +1032,59 @@ describe('GeminiChat', () => {
LlmRole.MAIN,
);
});
it('should flush transcript before tool dispatch for pure tool call with no text or thoughts', async () => {
  // Model response containing ONLY a functionCall part — no text, no thoughts.
  const toolOnlyStream = (async function* () {
    const chunk = {
      candidates: [
        {
          content: {
            role: 'model',
            parts: [
              {
                functionCall: {
                  name: 'read_file',
                  args: { path: 'test.py' },
                },
              },
            ],
          },
        },
      ],
    };
    yield chunk as unknown as GenerateContentResponse;
  })();
  vi.mocked(mockContentGenerator.generateContentStream).mockResolvedValue(
    toolOnlyStream,
  );

  // Snapshot how many transcript writes happened before this request, so we
  // only inspect writes triggered by the streaming call below.
  const { default: fs } = await import('node:fs');
  const mockedWriteFileSync = vi.mocked(fs.writeFileSync);
  const priorWriteCount = mockedWriteFileSync.mock.calls.length;

  const stream = await chat.sendMessageStream(
    { model: 'test-model' },
    'analyze test.py',
    'prompt-id-pure-tool-flush',
    new AbortController().signal,
    LlmRole.MAIN,
  );
  // Drain the stream so the recording logic runs to completion.
  for await (const _ of stream) {
    // consume
  }

  // At least one transcript write must have occurred during streaming.
  const writesDuringStream = mockedWriteFileSync.mock.calls.slice(
    priorWriteCount,
  );
  expect(writesDuringStream.length).toBeGreaterThan(0);

  // The final persisted transcript must already contain a model ('gemini')
  // message, even though the response carried no text — this is the flush
  // that BeforeTool hooks rely on.
  const lastWrite = writesDuringStream[writesDuringStream.length - 1];
  const persistedTranscript = JSON.parse(lastWrite[1] as string) as {
    messages: Array<{ type: string }>;
  };
  const modelMessages = persistedTranscript.messages.filter(
    (entry) => entry.type === 'gemini',
  );
  expect(modelMessages.length).toBeGreaterThan(0);
});
});
describe('addHistory', () => {

View File

@@ -818,6 +818,7 @@ export class GeminiChat {
const modelResponseParts: Part[] = [];
let hasToolCall = false;
let hasThoughts = false;
let finishReason: FinishReason | undefined;
for await (const chunk of streamResponse) {
@@ -834,6 +835,7 @@ export class GeminiChat {
if (content?.parts) {
if (content.parts.some((part) => part.thought)) {
// Record thoughts
hasThoughts = true;
this.recordThoughtFromContent(content);
}
if (content.parts.some((part) => part.functionCall)) {
@@ -901,8 +903,10 @@ export class GeminiChat {
.join('')
.trim();
// Record model response text from the collected parts
if (responseText) {
// Record model response text from the collected parts.
// Also flush when there are thoughts or a tool call (even with no text)
// so that BeforeTool hooks always see the latest transcript state.
if (responseText || hasThoughts || hasToolCall) {
this.chatRecordingService.recordMessage({
model,
type: 'gemini',