feat(core): Use thinking level for Gemini 3 (#13445)

This commit is contained in:
joshualitt
2025-11-20 11:19:52 -08:00
committed by GitHub
parent ade9dfeebb
commit 8c07ad2ab9
2 changed files with 64 additions and 8 deletions

View File

@@ -6,7 +6,7 @@
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import type { Content, GenerateContentResponse } from '@google/genai';
import { ApiError } from '@google/genai';
import { ApiError, ThinkingLevel } from '@google/genai';
import type { ContentGenerator } from '../core/contentGenerator.js';
import {
GeminiChat,
@@ -135,6 +135,9 @@ describe('GeminiChat', () => {
model: modelConfigKey.model,
generateContentConfig: {
temperature: 0,
thinkingConfig: {
thinkingBudget: 1000,
},
},
})),
},
@@ -972,12 +975,54 @@ describe('GeminiChat', () => {
systemInstruction: '',
tools: [],
temperature: 0,
thinkingConfig: {
thinkingBudget: 1000,
},
abortSignal: expect.any(AbortSignal),
},
},
'prompt-id-1',
);
});
// Verifies that for models whose name starts with 'gemini-3', GeminiChat
// rewrites thinkingConfig to use thinkingLevel (HIGH) and strips any
// configured thinkingBudget before calling the content generator.
it('should use thinkingLevel and remove thinkingBudget for gemini-3 models', async () => {
// Minimal async-generator stream yielding a single complete model turn,
// so sendMessageStream can run to completion.
const response = (async function* () {
yield {
candidates: [
{
content: { parts: [{ text: 'response' }], role: 'model' },
finishReason: 'STOP',
},
],
} as unknown as GenerateContentResponse;
})();
vi.mocked(mockContentGenerator.generateContentStream).mockResolvedValue(
response,
);
const stream = await chat.sendMessageStream(
// Only the 'gemini-3' prefix matters for triggering the rewrite;
// the rest of the model string is a test-only sentinel.
{ model: 'gemini-3-test-only-model-string-for-testing' },
'hello',
'prompt-id-thinking-level',
new AbortController().signal,
);
// Drain the stream; the assertion below inspects the mocked call instead
// of the streamed chunks.
for await (const _ of stream) {
// consume stream
}
expect(mockContentGenerator.generateContentStream).toHaveBeenCalledWith(
expect.objectContaining({
model: 'gemini-3-test-only-model-string-for-testing',
config: expect.objectContaining({
// thinkingBudget must be cleared and thinkingLevel forced to HIGH.
// (toEqual-style matching treats a deleted key and an explicit
// `undefined` value as equal, so this asserts the deletion.)
thinkingConfig: {
thinkingBudget: undefined,
thinkingLevel: ThinkingLevel.HIGH,
},
}),
}),
'prompt-id-thinking-level',
);
});
});
describe('addHistory', () => {

View File

@@ -15,6 +15,7 @@ import type {
PartListUnion,
GenerateContentConfig,
} from '@google/genai';
import { ThinkingLevel } from '@google/genai';
import { toParts } from '../code_assist/converter.js';
import { createUserContent, FinishReason } from '@google/genai';
import { retryWithBackoff } from '../utils/retry.js';
@@ -412,6 +413,22 @@ export class GeminiChat {
}
effectiveModel = modelToUse;
const config = {
...generateContentConfig,
// TODO(12622): Ensure we don't overwrite these when they are
// passed via config.
systemInstruction: this.systemInstruction,
tools: this.tools,
};
// TODO(joshualitt): Clean this up with model configs.
if (modelToUse.startsWith('gemini-3')) {
config.thinkingConfig = {
...config.thinkingConfig,
thinkingLevel: ThinkingLevel.HIGH,
};
delete config.thinkingConfig?.thinkingBudget;
}
return this.config.getContentGenerator().generateContentStream(
{
@@ -420,13 +437,7 @@ export class GeminiChat {
modelToUse === PREVIEW_GEMINI_MODEL
? contentsForPreviewModel
: requestContents,
config: {
...generateContentConfig,
// TODO(12622): Ensure we don't overrwrite these when they are
// passed via config.
systemInstruction: this.systemInstruction,
tools: this.tools,
},
config,
},
prompt_id,
);