feat(core): implement session learnings summary hook

This commit is contained in:
Aishanee Shah
2026-02-11 16:32:15 +00:00
parent 84ce53aafa
commit f72359efef
6 changed files with 298 additions and 1 deletions

View File

@@ -796,6 +796,7 @@ export async function loadCliConfig(
toolOutputMasking: settings.experimental?.toolOutputMasking,
noBrowser: !!process.env['NO_BROWSER'],
summarizeToolOutput: settings.model?.summarizeToolOutput,
sessionLearnings: settings.general?.sessionLearnings?.enabled,
ideMode,
disableLoopDetection: settings.model?.disableLoopDetection,
compressionThreshold: settings.model?.compressionThreshold,

View File

@@ -322,6 +322,27 @@ const SETTINGS_SCHEMA = {
},
description: 'Settings for automatic session cleanup.',
},
sessionLearnings: {
type: 'object',
label: 'Session Learnings',
category: 'General',
requiresRestart: false,
default: {},
description: 'Settings for session learning summaries.',
showInDialog: false,
properties: {
enabled: {
type: 'boolean',
label: 'Enable Session Learnings',
category: 'General',
requiresRestart: false,
default: false,
description:
'Automatically generate a session-learnings.md file when the session ends.',
showInDialog: true,
},
},
},
},
},
output: {

View File

@@ -477,6 +477,7 @@ export interface ConfigParameters {
experimentalJitContext?: boolean;
toolOutputMasking?: Partial<ToolOutputMaskingConfig>;
disableLLMCorrection?: boolean;
sessionLearnings?: boolean;
plan?: boolean;
onModelChange?: (model: string) => void;
mcpEnabled?: boolean;
@@ -662,6 +663,7 @@ export class Config {
private readonly experimentalJitContext: boolean;
private readonly disableLLMCorrection: boolean;
private readonly sessionLearnings: boolean;
private readonly planEnabled: boolean;
private contextManager?: ContextManager;
private terminalBackground: string | undefined = undefined;
@@ -750,6 +752,7 @@ export class Config {
this.enableAgents = params.enableAgents ?? false;
this.agents = params.agents ?? {};
this.disableLLMCorrection = params.disableLLMCorrection ?? true;
this.sessionLearnings = params.sessionLearnings ?? false;
this.planEnabled = params.plan ?? false;
this.enableEventDrivenScheduler = params.enableEventDrivenScheduler ?? true;
this.skillsSupport = params.skillsSupport ?? true;
@@ -1953,6 +1956,10 @@ export class Config {
return this.disableLLMCorrection;
}
/** Whether the session-learnings summary feature is enabled (set from settings at construction). */
isSessionLearningsEnabled(): boolean {
return this.sessionLearnings;
}
/** Whether plan mode is enabled (set from `params.plan` at construction). */
isPlanEnabled(): boolean {
return this.planEnabled;
}

View File

@@ -33,6 +33,7 @@ import type {
ToolListUnion,
} from '@google/genai';
import type { ToolCallConfirmationDetails } from '../tools/tools.js';
import { SessionLearningsService } from '../services/sessionLearningsService.js';
/**
* Main hook system that coordinates all hook-related functionality
@@ -151,6 +152,7 @@ export class HookSystem {
private readonly hookAggregator: HookAggregator;
private readonly hookPlanner: HookPlanner;
private readonly hookEventHandler: HookEventHandler;
private readonly sessionLearningsService: SessionLearningsService;
constructor(config: Config) {
// Initialize components
@@ -164,6 +166,7 @@ export class HookSystem {
this.hookRunner,
this.hookAggregator,
);
this.sessionLearningsService = new SessionLearningsService(config);
}
/**
@@ -215,7 +218,14 @@ export class HookSystem {
async fireSessionEndEvent(
  reason: SessionEndReason,
): Promise<AggregatedHookResult | undefined> {
  // Fire the user-configured SessionEnd hooks first, then the built-in
  // session-learnings system hook. The learnings service only runs for
  // terminal session ends and handles its own failures internally.
  const aggregated = await this.hookEventHandler.fireSessionEndEvent(reason);
  const isTerminalEnd = reason === 'exit' || reason === 'logout';
  if (isTerminalEnd) {
    await this.sessionLearningsService.generateAndSaveLearnings();
  }
  return aggregated;
}
async firePreCompressEvent(

View File

@@ -0,0 +1,125 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
/* eslint-disable @typescript-eslint/no-explicit-any */
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { SessionLearningsService } from './sessionLearningsService.js';
import type { Config } from '../config/config.js';
import type { GenerateContentResponse } from '@google/genai';
import fs from 'node:fs/promises';
import path from 'node:path';
// Unit tests for SessionLearningsService. Covers: the happy path (summary
// generated and written to session-learnings.md), gating on the settings
// flag, the minimum-message threshold, and error tolerance. All
// collaborators (config, chat recording, content generation, fs) are mocked.
describe('SessionLearningsService', () => {
let service: SessionLearningsService;
let mockConfig: unknown;
// Mocks are typed `any` so arbitrary vitest spies can be attached/overridden per test.
let mockRecordingService: any;
let mockGeminiClient: any;
let mockContentGenerator: any;
let mockGenerateContent: any;
beforeEach(() => {
vi.clearAllMocks();
// Default LLM response: a single candidate containing the Markdown summary.
mockGenerateContent = vi.fn().mockResolvedValue({
candidates: [
{
content: {
parts: [{ text: '# Session Learnings\nSummary text here.' }],
},
},
],
} as unknown as GenerateContentResponse);
mockContentGenerator = {
generateContent: mockGenerateContent,
};
// Two recorded messages — just enough to clear the service's minimum-message check.
mockRecordingService = {
getConversation: vi.fn().mockReturnValue({
messages: [
{ type: 'user', content: [{ text: 'Question' }] },
{ type: 'gemini', content: [{ text: 'Answer' }] },
],
}),
};
mockGeminiClient = {
getChatRecordingService: () => mockRecordingService,
};
// Minimal Config surface the service (and the model-selection path it
// triggers) reads. Feature flag defaults to enabled.
mockConfig = {
isSessionLearningsEnabled: vi.fn().mockReturnValue(true),
getGeminiClient: () => mockGeminiClient,
getContentGenerator: () => mockContentGenerator,
getWorkingDir: () => '/mock/cwd',
getActiveModel: () => 'gemini-1.5-flash',
getModel: () => 'gemini-1.5-flash',
isInteractive: () => true,
setActiveModel: vi.fn(),
getUserTier: () => 'free',
getContentGeneratorConfig: () => ({ authType: 'apiKey' }),
getModelAvailabilityService: () => ({
selectFirstAvailable: (models: string[]) => ({
selectedModel: models[0],
}),
consumeStickyAttempt: vi.fn(),
markHealthy: vi.fn(),
}),
modelConfigService: {
getResolvedConfig: vi
.fn()
.mockReturnValue({ model: 'gemini-1.5-flash', config: {} }),
},
};
service = new SessionLearningsService(mockConfig as Config);
// Stub the filesystem so no file is actually written; asserted on below.
vi.spyOn(fs, 'writeFile').mockResolvedValue(undefined);
});
afterEach(() => {
vi.restoreAllMocks();
});
// Happy path: LLM is called and the summary lands in <cwd>/session-learnings.md.
it('should generate and save learnings when enabled and enough messages exist', async () => {
await service.generateAndSaveLearnings();
expect(mockGenerateContent).toHaveBeenCalled();
expect(fs.writeFile).toHaveBeenCalledWith(
path.join('/mock/cwd', 'session-learnings.md'),
'# Session Learnings\nSummary text here.',
'utf-8',
);
});
// Settings flag off: the service must be a no-op (no LLM call, no write).
it('should not generate learnings if disabled', async () => {
(mockConfig as any).isSessionLearningsEnabled.mockReturnValue(false);
await service.generateAndSaveLearnings();
expect(mockGenerateContent).not.toHaveBeenCalled();
expect(fs.writeFile).not.toHaveBeenCalled();
});
// Below the minimum-message threshold: skip without calling the LLM.
it('should not generate learnings if not enough messages', async () => {
mockRecordingService.getConversation.mockReturnValue({
messages: [{ type: 'user', content: [{ text: 'Single message' }] }],
});
await service.generateAndSaveLearnings();
expect(mockGenerateContent).not.toHaveBeenCalled();
});
// LLM failure must be swallowed — session teardown must never be blocked.
it('should handle errors gracefully', async () => {
mockGenerateContent.mockRejectedValue(new Error('LLM Error'));
// Should not throw
await expect(service.generateAndSaveLearnings()).resolves.not.toThrow();
});
});

View File

@@ -0,0 +1,133 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import type { Config } from '../config/config.js';
import { BaseLlmClient } from '../core/baseLlmClient.js';
import { debugLogger } from '../utils/debugLogger.js';
import { partListUnionToString } from '../core/geminiRequest.js';
import { getResponseText } from '../utils/partUtils.js';
import type { Content } from '@google/genai';
import fs from 'node:fs/promises';
import path from 'node:path';
// Minimum recorded messages before a summary is worth generating.
const MIN_MESSAGES = 2;
// Only the most recent messages are sent to the model, to bound prompt size.
const MAX_MESSAGES_FOR_CONTEXT = 30;
// Per-message character cap before truncation with an ellipsis.
const MAX_MESSAGE_LENGTH = 1000;
// Hard deadline for the summary LLM call.
const TIMEOUT_MS = 30000;
const LEARNINGS_FILENAME = 'session-learnings.md';
const LEARNINGS_PROMPT = `It's time to pause on this development. Looking back at what you have done so far:
Prepare a summary of the problem you were trying to solve, the analysis synthesized, and information you would need to implement this request if you were to start again
Don't focus on unnecessary details - keep the abstraction at a level that allows a senior engineer for example, to take it from you.
Do focus on gotchas, explored paths that didn't go anywhere with a why, and what you'd do differently.
Also note down other issues you might have found as future project ideas.
Conversation transcript follows:
---
{transcript}
---
Provide your response in Markdown format.`;
/**
 * Service to generate and save session learnings summaries.
 *
 * On demand (typically at session end), asks the model to summarize the
 * recorded conversation and writes the Markdown result to
 * `session-learnings.md` in the working directory. All failures are logged
 * and swallowed so this never blocks session teardown.
 */
export class SessionLearningsService {
  constructor(private readonly config: Config) {}
  /**
   * Generates a summary of the session learnings and saves it to a file.
   *
   * No-op when the feature is disabled in settings, when there are fewer
   * than {@link MIN_MESSAGES} recorded messages, or when no content
   * generator is available. Never throws.
   */
  async generateAndSaveLearnings(): Promise<void> {
    try {
      // Check if enabled in settings
      if (!this.config.isSessionLearningsEnabled()) {
        return;
      }
      const geminiClient = this.config.getGeminiClient();
      const recordingService = geminiClient.getChatRecordingService();
      const conversation = recordingService.getConversation();
      if (!conversation || conversation.messages.length < MIN_MESSAGES) {
        debugLogger.debug(
          `[SessionLearnings] Skipping summary, not enough messages (${conversation?.messages.length ?? 0})`,
        );
        return;
      }
      // Prepare transcript: keep only the tail of the conversation and cap
      // each message's length so the prompt stays bounded.
      const relevantMessages = conversation.messages.slice(
        -MAX_MESSAGES_FOR_CONTEXT,
      );
      const transcript = relevantMessages
        .map((msg) => {
          const role = msg.type === 'user' ? 'User' : 'Assistant';
          const content = partListUnionToString(msg.content);
          const truncated =
            content.length > MAX_MESSAGE_LENGTH
              ? content.slice(0, MAX_MESSAGE_LENGTH) + '...'
              : content;
          return `[${role}]: ${truncated}`;
        })
        .join('\n\n');
      // Use a function replacer: passing the transcript as a plain string
      // would make String.replace interpret '$'-sequences in conversation
      // content (e.g. "$&", "$'", "$1") as special replacement patterns and
      // silently corrupt the prompt.
      const prompt = LEARNINGS_PROMPT.replace('{transcript}', () => transcript);
      const contentGenerator = this.config.getContentGenerator();
      if (!contentGenerator) {
        debugLogger.debug('[SessionLearnings] Content generator not available');
        return;
      }
      const baseLlmClient = new BaseLlmClient(contentGenerator, this.config);
      // AbortSignal.timeout() owns the timer lifecycle, so no manual
      // setTimeout/clearTimeout bookkeeping is needed; a timeout surfaces as
      // an abort rejection caught by the outer try/catch.
      const abortSignal = AbortSignal.timeout(TIMEOUT_MS);
      const contents: Content[] = [
        {
          role: 'user',
          parts: [{ text: prompt }],
        },
      ];
      debugLogger.debug('[SessionLearnings] Generating summary...');
      const response = await baseLlmClient.generateContent({
        modelConfigKey: { model: 'summarizer-default' },
        contents,
        abortSignal,
        promptId: 'session-learnings-generation',
      });
      const summary = getResponseText(response);
      if (!summary) {
        debugLogger.warn(
          '[SessionLearnings] Failed to generate summary (empty response)',
        );
        return;
      }
      const filePath = path.join(
        this.config.getWorkingDir(),
        LEARNINGS_FILENAME,
      );
      await fs.writeFile(filePath, summary, 'utf-8');
      debugLogger.log(
        `[SessionLearnings] Saved session learnings to ${filePath}`,
      );
    } catch (error) {
      // Best-effort feature: log and swallow so session end is never blocked.
      debugLogger.warn(
        `[SessionLearnings] Error generating learnings: ${error instanceof Error ? error.message : String(error)}`,
      );
    }
  }
}