Speculative partial fix for typed sidecar configs

This commit is contained in:
Your Name
2026-04-06 21:10:58 +00:00
parent 1774abebe9
commit dd7190bf9c
9 changed files with 350 additions and 53 deletions
+9 -15
View File
@@ -18,22 +18,24 @@ import { ContextTracer } from './tracer.js';
import type { ContextEnvironment } from './sidecar/environment.js';
import type { SidecarConfig } from './sidecar/types.js';
import { ProcessorRegistry } from './sidecar/registry.js';
import { PipelineOrchestrator } from './sidecar/orchestrator.js';
import { HistoryObserver } from './historyObserver.js';
import { generateWorkingBufferView } from './ir/graphUtils.js';
import { ToolMaskingProcessor } from './processors/toolMaskingProcessor.js';
import { BlobDegradationProcessor } from './processors/blobDegradationProcessor.js';
import { SemanticCompressionProcessor } from './processors/semanticCompressionProcessor.js';
import { HistorySquashingProcessor } from './processors/historySquashingProcessor.js';
import { StateSnapshotProcessor } from './processors/stateSnapshotProcessor.js';
import { EmergencyTruncationProcessor } from './processors/emergencyTruncationProcessor.js';
import { IrProjector } from './ir/projector.js';
import './sidecar/builtins.js';
export class ContextManager {
@@ -53,14 +55,6 @@ export class ContextManager {
constructor(private sidecar: SidecarConfig, private env: ContextEnvironment, private readonly tracer: ContextTracer) {
this.eventBus = env.eventBus;
// Register built-ins BEFORE creating Orchestrator
ProcessorRegistry.register({ id: 'ToolMaskingProcessor', create: (env, opts) => new ToolMaskingProcessor(env, opts as any) });
ProcessorRegistry.register({ id: 'BlobDegradationProcessor', create: (env, opts) => new BlobDegradationProcessor(env) });
ProcessorRegistry.register({ id: 'SemanticCompressionProcessor', create: (env, opts) => new SemanticCompressionProcessor(env, opts as any) });
ProcessorRegistry.register({ id: 'HistorySquashingProcessor', create: (env, opts) => new HistorySquashingProcessor(env, opts as any) });
ProcessorRegistry.register({ id: 'StateSnapshotProcessor', create: (env, opts) => StateSnapshotProcessor.create(env, opts as any) });
ProcessorRegistry.register({ id: 'EmergencyTruncationProcessor', create: (env, opts) => EmergencyTruncationProcessor.create(env, opts as any) });
this.orchestrator = new PipelineOrchestrator(this.sidecar, this.env, this.eventBus, this.tracer);
this.eventBus.onPristineHistoryUpdated((event) => {
@@ -0,0 +1,62 @@
import { describe, it, expect, vi, beforeEach } from 'vitest';
import * as fs from 'node:fs';
import { SidecarLoader } from './SidecarLoader.js';
import { defaultSidecarProfile } from './profiles.js';
vi.mock('node:fs');
describe('SidecarLoader', () => {
  beforeEach(() => {
    vi.resetAllMocks();
  });

  // Minimal Config stub: SidecarLoader only reads the sidecar path accessor.
  const stubConfig = {
    getExperimentalContextSidecarConfig: () => '/path/to/sidecar.json'
  } as any;

  // Points the mocked node:fs at an existing file with the given stat size
  // and (optionally) content.
  function stubSidecarFile(size: number, content?: string) {
    vi.mocked(fs.existsSync).mockReturnValue(true);
    vi.mocked(fs.statSync).mockReturnValue({ size } as any);
    if (content !== undefined) {
      vi.mocked(fs.readFileSync).mockReturnValue(content);
    }
  }

  it('returns default profile if file does not exist', () => {
    vi.mocked(fs.existsSync).mockReturnValue(false);
    expect(SidecarLoader.fromConfig(stubConfig)).toBe(defaultSidecarProfile);
  });

  it('returns default profile if file exists but is 0 bytes', () => {
    stubSidecarFile(0);
    expect(SidecarLoader.fromConfig(stubConfig)).toBe(defaultSidecarProfile);
  });

  it('throws an error if file is empty whitespace', () => {
    stubSidecarFile(5, ' \n ');
    expect(() => SidecarLoader.fromConfig(stubConfig)).toThrow('is empty');
  });

  it('returns parsed config if file is valid', () => {
    const validConfig = {
      budget: { retainedTokens: 1000, maxTokens: 2000 },
      gcBackstop: { strategy: 'truncate', target: 'max' },
      pipelines: []
    };
    stubSidecarFile(100, JSON.stringify(validConfig));
    expect(SidecarLoader.fromConfig(stubConfig)).toEqual(validConfig);
  });

  it('throws an error if schema validation fails', () => {
    const invalidConfig = {
      budget: { retainedTokens: 'invalid string' }, // wrong type: number expected
      pipelines: []
    };
    stubSidecarFile(100, JSON.stringify(invalidConfig));
    expect(() => SidecarLoader.fromConfig(stubConfig)).toThrow('Validation error:');
  });
});
@@ -8,25 +8,59 @@ import * as fs from 'node:fs';
import type { Config } from '../../config/config.js';
import type { SidecarConfig } from './types.js';
import { defaultSidecarProfile } from './profiles.js';
import { debugLogger } from 'src/utils/debugLogger.js';
import { SchemaValidator } from '../../utils/schemaValidator.js';
import { sidecarConfigSchema } from './schema.js';
export class SidecarLoader {
/**
 * Loads and validates a sidecar config from a specific file path.
 *
 * @param sidecarPath Path to the sidecar JSON configuration file.
 * @returns The parsed, schema-validated SidecarConfig.
 * @throws If the file cannot be read, is blank, contains invalid JSON, or
 *   fails validation against `sidecarConfigSchema`.
 */
static loadFromFile(sidecarPath: string): SidecarConfig {
  // May throw (e.g. ENOENT); fromConfig checks existence before calling.
  const raw = fs.readFileSync(sidecarPath, 'utf8');

  // A whitespace-only file is treated as a configuration error.
  if (!raw.trim()) {
    throw new Error(`Sidecar configuration file at ${sidecarPath} is empty.`);
  }

  let parsed: unknown;
  try {
    parsed = JSON.parse(raw);
  } catch (error) {
    const reason = error instanceof Error ? error.message : String(error);
    throw new Error(
      `Failed to parse Sidecar configuration file at ${sidecarPath}: ${reason}`,
    );
  }

  const validationError = SchemaValidator.validate(sidecarConfigSchema, parsed);
  if (validationError) {
    throw new Error(
      `Invalid sidecar configuration in ${sidecarPath}. Validation error: ${validationError}`,
    );
  }

  // Safe: the schema check above guarantees the shape.
  // eslint-disable-next-line @typescript-eslint/no-unsafe-type-assertion
  return parsed as SidecarConfig;
}
/**
* Generates a Sidecar JSON graph from the experimental config file path or defaults.
* If a config file is present but invalid, this will THROW to prevent silent misconfiguration.
*/
static fromConfig(config: Config): SidecarConfig {
const sidecarPath = config.getExperimentalContextSidecarConfig()
const sidecarPath = config.getExperimentalContextSidecarConfig();
if (sidecarPath && fs.existsSync(sidecarPath)) {
try {
const fileContent = fs.readFileSync(sidecarPath, 'utf8');
return JSON.parse(fileContent) as SidecarConfig;
} catch (error) {
debugLogger.error(
`Failed to parse Sidecar configuration file at ${sidecarPath}:`,
error,
);
// Fallback to default
const stat = fs.statSync(sidecarPath);
// If the file exists but is completely empty (0 bytes), it's safe to fallback.
if (stat.size === 0) {
return defaultSidecarProfile;
}
// If the file has content, enforce strict validation and throw on failure.
return this.loadFromFile(sidecarPath);
}
return defaultSidecarProfile;
@@ -0,0 +1,115 @@
/**
* @license
* Copyright 2026 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { ProcessorRegistry } from './registry.js';
import { ToolMaskingProcessor } from '../processors/toolMaskingProcessor.js';
import { BlobDegradationProcessor } from '../processors/blobDegradationProcessor.js';
import { SemanticCompressionProcessor } from '../processors/semanticCompressionProcessor.js';
import { HistorySquashingProcessor } from '../processors/historySquashingProcessor.js';
import { StateSnapshotProcessor } from '../processors/stateSnapshotProcessor.js';
import { EmergencyTruncationProcessor } from '../processors/emergencyTruncationProcessor.js';
/**
 * Registers every built-in context processor with the ProcessorRegistry.
 *
 * Each registration carries a JSON-schema fragment describing the shape of
 * its ProcessorConfig entry (discriminated on `processorId`); these fragments
 * are aggregated via ProcessorRegistry.getSchemas() into the sidecar config
 * schema.
 */
export function registerBuiltInProcessors() {
  // Builds the ProcessorConfig schema fragment for one processor. When
  // `requiredOptions` is given, both the `options` object and the listed
  // option keys become mandatory; otherwise only `processorId` is required.
  const configSchema = (
    id: string,
    optionProperties?: Record<string, object>,
    requiredOptions?: string[],
  ): object => {
    const options: Record<string, unknown> = { type: 'object' };
    if (optionProperties) {
      options.properties = optionProperties;
    }
    if (requiredOptions) {
      options.required = requiredOptions;
    }
    return {
      type: 'object',
      properties: {
        processorId: { const: id },
        options,
      },
      required: requiredOptions ? ['processorId', 'options'] : ['processorId'],
    };
  };

  ProcessorRegistry.register({
    id: 'ToolMaskingProcessor',
    schema: configSchema(
      'ToolMaskingProcessor',
      { stringLengthThresholdTokens: { type: 'number' } },
      ['stringLengthThresholdTokens'],
    ),
    create: (env, opts) => new ToolMaskingProcessor(env, opts as any),
  });

  ProcessorRegistry.register({
    id: 'BlobDegradationProcessor',
    schema: configSchema('BlobDegradationProcessor'),
    create: (env) => new BlobDegradationProcessor(env),
  });

  ProcessorRegistry.register({
    id: 'SemanticCompressionProcessor',
    schema: configSchema(
      'SemanticCompressionProcessor',
      { nodeThresholdTokens: { type: 'number' } },
      ['nodeThresholdTokens'],
    ),
    create: (env, opts) => new SemanticCompressionProcessor(env, opts as any),
  });

  ProcessorRegistry.register({
    id: 'HistorySquashingProcessor',
    schema: configSchema(
      'HistorySquashingProcessor',
      { maxTokensPerNode: { type: 'number' } },
      ['maxTokensPerNode'],
    ),
    create: (env, opts) => new HistorySquashingProcessor(env, opts as any),
  });

  ProcessorRegistry.register({
    id: 'StateSnapshotProcessor',
    // All StateSnapshot options are optional, so only `processorId` is required.
    schema: configSchema('StateSnapshotProcessor', {
      model: { type: 'string' },
      systemInstruction: { type: 'string' },
      triggerDeficitTokens: { type: 'number' },
    }),
    create: (env, opts) => StateSnapshotProcessor.create(env, opts as any),
  });

  ProcessorRegistry.register({
    id: 'EmergencyTruncationProcessor',
    schema: configSchema('EmergencyTruncationProcessor'),
    create: (env, opts) => EmergencyTruncationProcessor.create(env, opts as any),
  });
}
// Self-register the built-ins as a side effect of importing this module, so
// the ProcessorRegistry is populated before any code that imports it runs.
registerBuiltInProcessors();
@@ -28,7 +28,7 @@ export const defaultSidecarProfile: SidecarConfig = {
processors: [
{ processorId: 'ToolMaskingProcessor', options: { stringLengthThresholdTokens: 8000 } },
{ processorId: 'BlobDegradationProcessor', options: {} },
{ processorId: 'SemanticCompressionProcessor', options: { nodeThresholdTokens: 5000, contextWindowPercentage: 0.2 } },
{ processorId: 'SemanticCompressionProcessor', options: { nodeThresholdTokens: 5000 } },
{ processorId: 'EmergencyTruncationProcessor', options: {} }
]
},
@@ -12,6 +12,7 @@ export interface ContextProcessorDef<
TOptions extends Record<string, unknown> = any,
> {
readonly id: string;
readonly schema?: object;
create(
env: ContextEnvironment,
options: TOptions,
@@ -36,6 +37,16 @@ export class ProcessorRegistry {
return def;
}
/**
 * Collects the JSON-schema fragments of every registered processor that
 * declares one, in registration (Map insertion) order.
 */
static getSchemas(): object[] {
  return [...this.processors.values()].flatMap((def) =>
    def.schema ? [def.schema] : [],
  );
}
/** Removes every registered processor definition from the registry. */
static clear() {
  this.processors.clear();
}
@@ -0,0 +1,97 @@
/**
* @license
* Copyright 2026 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { ProcessorRegistry } from './registry.js';
import './builtins.js';
/**
 * JSON Schema (draft-07) describing the SidecarConfig file consumed by the
 * context manager.
 *
 * The per-processor entry schemas under `pipelines[].processors.items` are
 * aggregated from the ProcessorRegistry. A getter is used so the registry is
 * queried lazily on access rather than snapshotted once at module load — a
 * snapshot would miss processors registered after this module is evaluated
 * (or re-registered after ProcessorRegistry.clear()).
 */
export const sidecarConfigSchema = {
  $schema: "http://json-schema.org/draft-07/schema#",
  title: "SidecarConfig",
  description: "The Data-Driven Schema for the Context Manager.",
  type: "object",
  required: ["budget", "gcBackstop", "pipelines"],
  properties: {
    budget: {
      type: "object",
      description: "Defines the token ceilings and limits for the pipeline.",
      required: ["retainedTokens", "maxTokens"],
      properties: {
        retainedTokens: {
          type: "number",
          description: "The ideal token count the pipeline tries to shrink down to."
        },
        maxTokens: {
          type: "number",
          description: "The absolute maximum token count allowed before synchronous truncation kicks in."
        }
      }
    },
    gcBackstop: {
      type: "object",
      description: "Defines what happens when the pipeline fails to compress under 'maxTokens'",
      required: ["strategy", "target"],
      properties: {
        strategy: {
          type: "string",
          enum: ["truncate", "compress", "rollingSummarizer"]
        },
        target: {
          type: "string",
          enum: ["incremental", "freeNTokens", "max"]
        },
        // NOTE(review): presumably required when target is 'freeNTokens', but
        // the schema does not enforce that — confirm with consumers.
        freeTokensTarget: {
          type: "number"
        }
      }
    },
    pipelines: {
      type: "array",
      description: "The execution graphs for context manipulation.",
      items: {
        type: "object",
        required: ["name", "triggers", "execution", "processors"],
        properties: {
          name: {
            type: "string"
          },
          triggers: {
            // Triggers are either well-known event names or a timer object.
            type: "array",
            items: {
              anyOf: [
                {
                  type: "string",
                  enum: ["on_turn", "post_turn", "budget_exceeded"]
                },
                {
                  type: "object",
                  required: ["type", "intervalMs"],
                  properties: {
                    type: {
                      type: "string",
                      const: "timer"
                    },
                    intervalMs: {
                      type: "number"
                    }
                  }
                }
              ]
            }
          },
          execution: {
            type: "string",
            enum: ["blocking", "background"]
          },
          processors: {
            type: "array",
            // Lazily resolve the registered processor schemas on each access.
            // If the registry is empty this yields `oneOf: []`, which rejects
            // every processor entry — builtins.js must have been imported.
            get items() {
              return { oneOf: ProcessorRegistry.getSchemas() };
            }
          }
        }
      }
    }
  }
};
+9 -7
View File
@@ -4,16 +4,18 @@
* SPDX-License-Identifier: Apache-2.0
*/
import type { StateSnapshotProcessorOptions } from '../processors/stateSnapshotProcessor.js';
/**
* Definition of a processor or worker to be instantiated in the graph.
*/
export interface ProcessorConfig {
/** The registered ID of the processor (e.g. 'SemanticCompressionProcessor') */
processorId: string;
/** Dynamic, processor-specific hyperparameters */
options: Record<string, unknown>;
}
/**
 * Configuration entry for a single processor in a pipeline, discriminated on
 * `processorId`. Each variant pins the options its processor accepts;
 * processors without tunable options take an optional, open-ended bag.
 */
export type ProcessorConfig =
  | { processorId: 'ToolMaskingProcessor'; options: { stringLengthThresholdTokens: number } }
  | { processorId: 'BlobDegradationProcessor'; options?: Record<string, unknown> }
  | { processorId: 'SemanticCompressionProcessor'; options: { nodeThresholdTokens: number } }
  | { processorId: 'HistorySquashingProcessor'; options: { maxTokensPerNode: number } }
  | { processorId: 'StateSnapshotProcessor'; options: StateSnapshotProcessorOptions }
  | { processorId: 'EmergencyTruncationProcessor'; options?: Record<string, unknown> };
export type PipelineTrigger =
| 'on_turn'
@@ -1,18 +0,0 @@
/**
* @license
* Copyright 2026 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import type { ContextEventBus } from '../eventBus.js';
export interface AsyncContextWorker {
/** The unique name of the worker (e.g., 'StateSnapshotWorker') */
readonly name: string;
/** Starts listening to the ContextEventBus for background tasks */
start(bus: ContextEventBus): void;
/** Stops listening and aborts any pending background tasks */
stop(): void;
}