feat(core): Support routing for subagents.

This commit is contained in:
Your Name
2026-01-06 22:40:15 +00:00
parent 8f0324d868
commit ca6cc1ecd3
9 changed files with 98 additions and 22 deletions

View File

@@ -40,6 +40,8 @@ import type {
} from './types.js';
import { AgentTerminateMode } from './types.js';
import { templateString } from './utils.js';
import { isAutoModel } from '../config/models.js';
import type { RoutingContext } from '../routing/routingStrategy.js';
import { parseThought } from '../utils/thoughtUtils.js';
import { type z } from 'zod';
import { zodToJsonSchema } from 'zod-to-json-schema';
@@ -589,9 +591,34 @@ export class LocalAgentExecutor<TOutput extends z.ZodTypeAny> {
signal: AbortSignal,
promptId: string,
): Promise<{ functionCalls: FunctionCall[]; textResponse: string }> {
const modelConfigAlias = getModelConfigAlias(this.definition);
// Resolve the model config early to get the concrete model string (which may be `auto`).
const resolvedConfig =
this.runtimeContext.modelConfigService.getResolvedConfig({
model: modelConfigAlias,
overrideScope: this.definition.name,
});
const requestedModel = resolvedConfig.model;
let modelToUse: string;
if (isAutoModel(requestedModel)) {
const routingContext: RoutingContext = {
history: chat.getHistory(/*curated=*/ true),
request: message.parts || [],
signal,
requestedModel,
};
const router = this.runtimeContext.getModelRouterService();
const decision = await router.route(routingContext);
modelToUse = decision.model;
} else {
modelToUse = requestedModel;
}
const responseStream = await chat.sendMessageStream(
{
model: getModelConfigAlias(this.definition),
model: modelToUse,
overrideScope: this.definition.name,
},
message.parts || [],

View File

@@ -19,6 +19,7 @@ import {
GEMINI_MODEL_ALIAS_AUTO,
PREVIEW_GEMINI_FLASH_MODEL,
isPreviewModel,
isAutoModel,
} from '../config/models.js';
import type { ModelConfigAlias } from '../services/modelConfigService.js';
@@ -206,24 +207,46 @@ export class AgentRegistry {
model = this.config.getModel();
}
const runtimeAlias: ModelConfigAlias = {
modelConfig: {
model,
generateContentConfig: {
temperature: modelConfig.temp,
topP: modelConfig.top_p,
thinkingConfig: {
includeThoughts: true,
thinkingBudget: modelConfig.thinkingBudget ?? -1,
},
},
const generateContentConfig = {
temperature: modelConfig.temp,
topP: modelConfig.top_p,
thinkingConfig: {
includeThoughts: true,
thinkingBudget: modelConfig.thinkingBudget ?? -1,
},
};
this.config.modelConfigService.registerRuntimeModelConfig(
getModelConfigAlias(definition),
runtimeAlias,
);
if (isAutoModel(model)) {
this.config.modelConfigService.registerRuntimeModelOverride({
match: {
model: getModelConfigAlias(definition),
},
modelConfig: {
model,
generateContentConfig,
},
});
this.config.modelConfigService.registerRuntimeModelOverride({
match: {
overrideScope: definition.name,
},
modelConfig: {
generateContentConfig,
},
});
} else {
const runtimeAlias: ModelConfigAlias = {
modelConfig: {
model,
generateContentConfig,
},
};
this.config.modelConfigService.registerRuntimeModelConfig(
getModelConfigAlias(definition),
runtimeAlias,
);
}
}
/**

View File

@@ -148,6 +148,20 @@ export function isGemini2Model(model: string): boolean {
return /^gemini-2(\.|$)/.test(model);
}
/**
 * Determines whether a model name is one of the known "auto" aliases,
 * i.e. a placeholder whose concrete model must be resolved elsewhere
 * (per the surrounding diff, by the model router) rather than used directly.
 *
 * @param model The model name to check.
 * @returns True if the model is an auto-model alias.
 */
export function isAutoModel(model: string): boolean {
  switch (model) {
    case GEMINI_MODEL_ALIAS_AUTO:
    case PREVIEW_GEMINI_MODEL_AUTO:
    case DEFAULT_GEMINI_MODEL_AUTO:
      return true;
    default:
      return false;
  }
}
/**
* Checks if the model supports multimodal function responses (multimodal data nested within function response).
* This is supported in Gemini 3.

View File

@@ -605,6 +605,7 @@ export class GeminiClient {
history: this.getChat().getHistory(/*curated=*/ true),
request,
signal,
requestedModel: this.config.getModel(),
};
let modelToUse: string;

View File

@@ -35,6 +35,8 @@ export interface RoutingContext {
request: PartListUnion;
/** An abort signal to cancel an LLM call during routing. */
signal: AbortSignal;
/** The model string requested for this turn, if any. */
requestedModel?: string;
}
/**

View File

@@ -168,7 +168,7 @@ export class ClassifierStrategy implements RoutingStrategy {
const reasoning = routerResponse.reasoning;
const latencyMs = Date.now() - startTime;
const selectedModel = resolveClassifierModel(
config.getModel(),
context.requestedModel || config.getModel(),
routerResponse.model_choice,
config.getPreviewFeatures(),
);

View File

@@ -18,11 +18,11 @@ export class FallbackStrategy implements RoutingStrategy {
readonly name = 'fallback';
async route(
_context: RoutingContext,
context: RoutingContext,
config: Config,
_baseLlmClient: BaseLlmClient,
): Promise<RoutingDecision | null> {
const requestedModel = config.getModel();
const requestedModel = context.requestedModel || config.getModel();
const resolvedModel = resolveModel(
requestedModel,
config.getPreviewFeatures(),

View File

@@ -24,11 +24,11 @@ export class OverrideStrategy implements RoutingStrategy {
readonly name = 'override';
async route(
_context: RoutingContext,
context: RoutingContext,
config: Config,
_baseLlmClient: BaseLlmClient,
): Promise<RoutingDecision | null> {
const overrideModel = config.getModel();
const overrideModel = context.requestedModel || config.getModel();
// If the model is 'auto' we should pass to the next strategy.
if (

View File

@@ -65,6 +65,7 @@ export interface _ResolvedModelConfig {
export class ModelConfigService {
private readonly runtimeAliases: Record<string, ModelConfigAlias> = {};
private readonly runtimeOverrides: ModelConfigOverride[] = [];
// TODO(12597): Process config to build a typed alias hierarchy.
constructor(private readonly config: ModelConfigServiceConfig) {}
@@ -73,6 +74,10 @@ export class ModelConfigService {
this.runtimeAliases[aliasName] = alias;
}
registerRuntimeModelOverride(override: ModelConfigOverride): void {
this.runtimeOverrides.push(override);
}
private resolveAlias(
aliasName: string,
aliases: Record<string, ModelConfigAlias>,
@@ -123,7 +128,11 @@ export class ModelConfigService {
...customAliases,
...this.runtimeAliases,
};
const allOverrides = [...overrides, ...customOverrides];
const allOverrides = [
...overrides,
...customOverrides,
...this.runtimeOverrides,
];
let baseModel: string | undefined = context.model;
let resolvedConfig: GenerateContentConfig = {};