fix(core): set temperature to 1 on retry in sendMessageStream (#10866)

This commit is contained in:
Sandy Tao
2025-10-10 09:14:12 -07:00
committed by GitHub
parent 38bc856212
commit 8dc397c0a5
2 changed files with 76 additions and 1 deletions

View File

@@ -864,6 +864,72 @@ describe('GeminiChat', () => {
expect(uiTelemetryService.setLastPromptTokenCount).not.toHaveBeenCalled();
});
it('should set temperature to 1 on retry', async () => {
  // Helper: wrap one response object in a fresh async-generator stream,
  // matching the promise-wrapped shape generateContentStream returns.
  const streamOf = (response: unknown) =>
    (async function* () {
      yield response as GenerateContentResponse;
    })();

  // Attempt 1 yields an invalid stream to trigger the retry path;
  // attempt 2 yields a valid stream so the retry completes.
  vi.mocked(mockContentGenerator.generateContentStream)
    .mockImplementationOnce(async () =>
      streamOf({
        candidates: [{ content: { parts: [{ text: '' }] } }], // Invalid empty text part
      }),
    )
    .mockImplementationOnce(async () =>
      streamOf({
        candidates: [
          {
            content: { parts: [{ text: 'Successful response' }] },
            finishReason: 'STOP',
          },
        ],
      }),
    );

  const stream = await chat.sendMessageStream(
    'test-model',
    { message: 'test', config: { temperature: 0.5 } },
    'prompt-id-retry-temperature',
  );
  for await (const _ of stream) {
    // Drain the stream so both attempts actually execute.
  }

  expect(mockContentGenerator.generateContentStream).toHaveBeenCalledTimes(2);

  // The first attempt keeps the caller-supplied temperature.
  expect(mockContentGenerator.generateContentStream).toHaveBeenNthCalledWith(
    1,
    expect.objectContaining({
      config: expect.objectContaining({ temperature: 0.5 }),
    }),
    'prompt-id-retry-temperature',
  );
  // The retry overrides temperature to 1 to encourage different output.
  expect(mockContentGenerator.generateContentStream).toHaveBeenNthCalledWith(
    2,
    expect.objectContaining({
      config: expect.objectContaining({ temperature: 1 }),
    }),
    'prompt-id-retry-temperature',
  );
});
it('should fail after all retries on persistent invalid content and report metrics', async () => {
vi.mocked(mockContentGenerator.generateContentStream).mockImplementation(
async () =>

View File

@@ -273,10 +273,19 @@ export class GeminiChat {
yield { type: StreamEventType.RETRY };
}
// If this is a retry, set temperature to 1 to encourage different output.
// Shallow-copy params so the caller's request object is never mutated;
// only the config.temperature field is overridden, and only on retries.
const currentParams = { ...params };
if (attempt > 0) {
  currentParams.config = {
    ...currentParams.config,
    temperature: 1,
  };
}
// NOTE(review): the rendered diff carried both the old `params,` argument
// and the new `currentParams,` — only the retry-aware params are passed.
const stream = await self.makeApiCallAndProcessStream(
  model,
  requestContents,
  currentParams,
  prompt_id,
);