Shorten temp directory (#17901)

This commit is contained in:
joshualitt
2026-02-06 08:10:17 -08:00
committed by GitHub
parent 30354892b3
commit 6fb3b09003
24 changed files with 989 additions and 27 deletions
+45
View File
@@ -13,6 +13,7 @@
"dependencies": { "dependencies": {
"ink": "npm:@jrichman/ink@6.4.8", "ink": "npm:@jrichman/ink@6.4.8",
"latest-version": "^9.0.0", "latest-version": "^9.0.0",
"proper-lockfile": "^4.1.2",
"simple-git": "^3.28.0" "simple-git": "^3.28.0"
}, },
"bin": { "bin": {
@@ -26,6 +27,7 @@
"@types/minimatch": "^5.1.2", "@types/minimatch": "^5.1.2",
"@types/mock-fs": "^4.13.4", "@types/mock-fs": "^4.13.4",
"@types/prompts": "^2.4.9", "@types/prompts": "^2.4.9",
"@types/proper-lockfile": "^4.1.4",
"@types/react": "^19.2.0", "@types/react": "^19.2.0",
"@types/react-dom": "^19.2.0", "@types/react-dom": "^19.2.0",
"@types/shell-quote": "^1.7.5", "@types/shell-quote": "^1.7.5",
@@ -4108,6 +4110,16 @@
"kleur": "^3.0.3" "kleur": "^3.0.3"
} }
}, },
"node_modules/@types/proper-lockfile": {
"version": "4.1.4",
"resolved": "https://registry.npmjs.org/@types/proper-lockfile/-/proper-lockfile-4.1.4.tgz",
"integrity": "sha512-uo2ABllncSqg9F1D4nugVl9v93RmjxF6LJzQLMLDdPaXCUIDPeOJ21Gbqi43xNKzBi/WQ0Q0dICqufzQbMjipQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@types/retry": "*"
}
},
"node_modules/@types/qs": { "node_modules/@types/qs": {
"version": "6.14.0", "version": "6.14.0",
"resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.14.0.tgz", "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.14.0.tgz",
@@ -4203,6 +4215,13 @@
"node": ">= 0.6" "node": ">= 0.6"
} }
}, },
"node_modules/@types/retry": {
"version": "0.12.5",
"resolved": "https://registry.npmjs.org/@types/retry/-/retry-0.12.5.tgz",
"integrity": "sha512-3xSjTp3v03X/lSQLkczaN9UIEwJMoMCA1+Nb5HfbJEQWogdeQIyVtTvxPXDQjZ5zws8rFQfVfRdz03ARihPJgw==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/sarif": { "node_modules/@types/sarif": {
"version": "2.1.7", "version": "2.1.7",
"resolved": "https://registry.npmjs.org/@types/sarif/-/sarif-2.1.7.tgz", "resolved": "https://registry.npmjs.org/@types/sarif/-/sarif-2.1.7.tgz",
@@ -14052,6 +14071,32 @@
"react-is": "^16.13.1" "react-is": "^16.13.1"
} }
}, },
"node_modules/proper-lockfile": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/proper-lockfile/-/proper-lockfile-4.1.2.tgz",
"integrity": "sha512-TjNPblN4BwAWMXU8s9AEz4JmQxnD1NNL7bNOY/AKUzyamc379FWASUhc/K1pL2noVb+XmZKLL68cjzLsiOAMaA==",
"license": "MIT",
"dependencies": {
"graceful-fs": "^4.2.4",
"retry": "^0.12.0",
"signal-exit": "^3.0.2"
}
},
"node_modules/proper-lockfile/node_modules/retry": {
"version": "0.12.0",
"resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz",
"integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==",
"license": "MIT",
"engines": {
"node": ">= 4"
}
},
"node_modules/proper-lockfile/node_modules/signal-exit": {
"version": "3.0.7",
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
"integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==",
"license": "ISC"
},
"node_modules/proto-list": { "node_modules/proto-list": {
"version": "1.2.4", "version": "1.2.4",
"resolved": "https://registry.npmjs.org/proto-list/-/proto-list-1.2.4.tgz", "resolved": "https://registry.npmjs.org/proto-list/-/proto-list-1.2.4.tgz",
+2
View File
@@ -86,6 +86,7 @@
"@types/minimatch": "^5.1.2", "@types/minimatch": "^5.1.2",
"@types/mock-fs": "^4.13.4", "@types/mock-fs": "^4.13.4",
"@types/prompts": "^2.4.9", "@types/prompts": "^2.4.9",
"@types/proper-lockfile": "^4.1.4",
"@types/react": "^19.2.0", "@types/react": "^19.2.0",
"@types/react-dom": "^19.2.0", "@types/react-dom": "^19.2.0",
"@types/shell-quote": "^1.7.5", "@types/shell-quote": "^1.7.5",
@@ -126,6 +127,7 @@
"dependencies": { "dependencies": {
"ink": "npm:@jrichman/ink@6.4.8", "ink": "npm:@jrichman/ink@6.4.8",
"latest-version": "^9.0.0", "latest-version": "^9.0.0",
"proper-lockfile": "^4.1.2",
"simple-git": "^3.28.0" "simple-git": "^3.28.0"
}, },
"optionalDependencies": { "optionalDependencies": {
+4
View File
@@ -38,6 +38,10 @@ vi.mock('@google/gemini-cli-core', async (importOriginal) => {
disableMouseEvents: vi.fn(), disableMouseEvents: vi.fn(),
enterAlternateScreen: vi.fn(), enterAlternateScreen: vi.fn(),
disableLineWrapping: vi.fn(), disableLineWrapping: vi.fn(),
ProjectRegistry: vi.fn().mockImplementation(() => ({
initialize: vi.fn(),
getShortId: vi.fn().mockReturnValue('project-slug'),
})),
}; };
}); });
@@ -55,6 +55,9 @@ vi.mock('@google/gemini-cli-core', async (importOriginal) => {
'shell_history', 'shell_history',
); );
} }
initialize(): Promise<undefined> {
return Promise.resolve(undefined);
}
} }
return { return {
...actual, ...actual,
@@ -24,6 +24,7 @@ async function getHistoryFilePath(
configStorage?: Storage, configStorage?: Storage,
): Promise<string> { ): Promise<string> {
const storage = configStorage ?? new Storage(projectRoot); const storage = configStorage ?? new Storage(projectRoot);
await storage.initialize();
return storage.getHistoryFilePath(); return storage.getHistoryFilePath();
} }
@@ -45,6 +45,7 @@ vi.mock('@google/gemini-cli-core', async (importOriginal) => {
}, },
Storage: class { Storage: class {
getProjectTempDir = vi.fn(() => '/tmp/global'); getProjectTempDir = vi.fn(() => '/tmp/global');
initialize = vi.fn(() => Promise.resolve(undefined));
}, },
}; };
}); });
+6 -3
View File
@@ -256,8 +256,11 @@ const saveFileWithXclip = async (tempFilePath: string) => {
* @param targetDir The root directory of the current project. * @param targetDir The root directory of the current project.
* @returns The absolute path to the images directory. * @returns The absolute path to the images directory.
*/ */
function getProjectClipboardImagesDir(targetDir: string): string { async function getProjectClipboardImagesDir(
targetDir: string,
): Promise<string> {
const storage = new Storage(targetDir); const storage = new Storage(targetDir);
await storage.initialize();
const baseDir = storage.getProjectTempDir(); const baseDir = storage.getProjectTempDir();
return path.join(baseDir, 'images'); return path.join(baseDir, 'images');
} }
@@ -271,7 +274,7 @@ export async function saveClipboardImage(
targetDir: string, targetDir: string,
): Promise<string | null> { ): Promise<string | null> {
try { try {
const tempDir = getProjectClipboardImagesDir(targetDir); const tempDir = await getProjectClipboardImagesDir(targetDir);
await fs.mkdir(tempDir, { recursive: true }); await fs.mkdir(tempDir, { recursive: true });
// Generate a unique filename with timestamp // Generate a unique filename with timestamp
@@ -396,7 +399,7 @@ export async function cleanupOldClipboardImages(
targetDir: string, targetDir: string,
): Promise<void> { ): Promise<void> {
try { try {
const tempDir = getProjectClipboardImagesDir(targetDir); const tempDir = await getProjectClipboardImagesDir(targetDir);
const files = await fs.readdir(tempDir); const files = await fs.readdir(tempDir);
const oneHourAgo = Date.now() - 60 * 60 * 1000; const oneHourAgo = Date.now() - 60 * 60 * 1000;
@@ -18,6 +18,7 @@ vi.mock('@google/gemini-cli-core', async (importOriginal) => {
spawnAsync: vi.fn(), spawnAsync: vi.fn(),
Storage: class { Storage: class {
getProjectTempDir = vi.fn(() => "C:\\User's Files"); getProjectTempDir = vi.fn(() => "C:\\User's Files");
initialize = vi.fn(() => Promise.resolve(undefined));
}, },
}; };
}); });
+1
View File
@@ -11,6 +11,7 @@ import * as path from 'node:path';
vi.mock('@google/gemini-cli-core', () => ({ vi.mock('@google/gemini-cli-core', () => ({
Storage: vi.fn().mockImplementation(() => ({ Storage: vi.fn().mockImplementation(() => ({
getProjectTempDir: vi.fn().mockReturnValue('/tmp/project'), getProjectTempDir: vi.fn().mockReturnValue('/tmp/project'),
initialize: vi.fn().mockResolvedValue(undefined),
})), })),
shutdownTelemetry: vi.fn(), shutdownTelemetry: vi.fn(),
isTelemetrySdkInitialized: vi.fn().mockReturnValue(false), isTelemetrySdkInitialized: vi.fn().mockReturnValue(false),
+1
View File
@@ -102,6 +102,7 @@ async function drainStdin() {
export async function cleanupCheckpoints() { export async function cleanupCheckpoints() {
const storage = new Storage(process.cwd()); const storage = new Storage(process.cwd());
await storage.initialize();
const tempDir = storage.getProjectTempDir(); const tempDir = storage.getProjectTempDir();
const checkpointsDir = join(tempDir, 'checkpoints'); const checkpointsDir = join(tempDir, 'checkpoints');
try { try {
+6 -2
View File
@@ -362,8 +362,12 @@ export async function cleanupToolOutputFiles(
} }
const retentionConfig = settings.general.sessionRetention; const retentionConfig = settings.general.sessionRetention;
const tempDir = let tempDir = projectTempDir;
projectTempDir ?? new Storage(process.cwd()).getProjectTempDir(); if (!tempDir) {
const storage = new Storage(process.cwd());
await storage.initialize();
tempDir = storage.getProjectTempDir();
}
const toolOutputDir = path.join(tempDir, TOOL_OUTPUTS_DIR); const toolOutputDir = path.join(tempDir, TOOL_OUTPUTS_DIR);
// Check if directory exists // Check if directory exists
+2
View File
@@ -895,6 +895,8 @@ export class Config {
} }
this.initialized = true; this.initialized = true;
await this.storage.initialize();
// Add pending directories to workspace context // Add pending directories to workspace context
for (const dir of this.pendingIncludeDirectories) { for (const dir of this.pendingIncludeDirectories) {
this.workspaceContext.addDirectory(dir); this.workspaceContext.addDirectory(dir);
@@ -0,0 +1,303 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
vi.unmock('./projectRegistry.js');
import * as fs from 'node:fs';
import * as path from 'node:path';
import * as os from 'node:os';
import { ProjectRegistry } from './projectRegistry.js';
import { lock } from 'proper-lockfile';
vi.mock('proper-lockfile');
// Suite covering ProjectRegistry: slug generation, collision handling,
// persistence, and self-healing against the on-disk `.project_root` markers.
// The proper-lockfile `lock` is mocked so tests never block on real locks.
describe('ProjectRegistry', () => {
  let tempDir: string;
  let registryPath: string;
  let baseDir1: string;
  let baseDir2: string;
  // Mirrors ProjectRegistry's internal normalization: resolve, and lowercase
  // on Windows so path comparisons are case-insensitive there.
  function normalizePath(p: string): string {
    let resolved = path.resolve(p);
    if (os.platform() === 'win32') {
      resolved = resolved.toLowerCase();
    }
    return resolved;
  }
  beforeEach(() => {
    // Fresh sandbox per test: a registry file location plus two base dirs
    // where ownership markers get written.
    tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'gemini-registry-test-'));
    registryPath = path.join(tempDir, 'projects.json');
    baseDir1 = path.join(tempDir, 'base1');
    baseDir2 = path.join(tempDir, 'base2');
    fs.mkdirSync(baseDir1);
    fs.mkdirSync(baseDir2);
    // lock() resolves to a release function; both succeed by default.
    vi.mocked(lock).mockResolvedValue(vi.fn().mockResolvedValue(undefined));
  });
  afterEach(() => {
    fs.rmSync(tempDir, { recursive: true, force: true });
    vi.clearAllMocks();
  });
  it('generates a short ID from the basename', async () => {
    const registry = new ProjectRegistry(registryPath);
    await registry.initialize();
    const projectPath = path.join(tempDir, 'my-project');
    const shortId = await registry.getShortId(projectPath);
    expect(shortId).toBe('my-project');
  });
  it('slugifies the project name', async () => {
    const registry = new ProjectRegistry(registryPath);
    await registry.initialize();
    // Spaces/punctuation collapse to dashes; uppercase is lowered.
    const projectPath = path.join(tempDir, 'My Project! @2025');
    const shortId = await registry.getShortId(projectPath);
    expect(shortId).toBe('my-project-2025');
  });
  it('handles collisions with unique suffixes', async () => {
    const registry = new ProjectRegistry(registryPath);
    await registry.initialize();
    // Three distinct projects share the basename 'gemini'.
    const id1 = await registry.getShortId(path.join(tempDir, 'one', 'gemini'));
    const id2 = await registry.getShortId(path.join(tempDir, 'two', 'gemini'));
    const id3 = await registry.getShortId(
      path.join(tempDir, 'three', 'gemini'),
    );
    expect(id1).toBe('gemini');
    expect(id2).toBe('gemini-1');
    expect(id3).toBe('gemini-2');
  });
  it('persists and reloads the registry', async () => {
    const projectPath = path.join(tempDir, 'project-a');
    const registry1 = new ProjectRegistry(registryPath);
    await registry1.initialize();
    await registry1.getShortId(projectPath);
    // A second instance pointed at the same file sees the saved mapping.
    const registry2 = new ProjectRegistry(registryPath);
    await registry2.initialize();
    const id = await registry2.getShortId(projectPath);
    expect(id).toBe('project-a');
    const data = JSON.parse(fs.readFileSync(registryPath, 'utf8'));
    // Use the actual normalized path as key
    const normalizedPath = normalizePath(projectPath);
    expect(data.projects[normalizedPath]).toBe('project-a');
  });
  it('normalizes paths', async () => {
    const registry = new ProjectRegistry(registryPath);
    await registry.initialize();
    // Equivalent paths (via '..') must map to the same slug.
    const path1 = path.join(tempDir, 'project');
    const path2 = path.join(path1, '..', 'project');
    const id1 = await registry.getShortId(path1);
    const id2 = await registry.getShortId(path2);
    expect(id1).toBe(id2);
  });
  it('creates ownership markers in base directories', async () => {
    const registry = new ProjectRegistry(registryPath, [baseDir1, baseDir2]);
    await registry.initialize();
    const projectPath = normalizePath(path.join(tempDir, 'project-x'));
    const shortId = await registry.getShortId(projectPath);
    expect(shortId).toBe('project-x');
    // Both base dirs get a .project_root marker containing the project path.
    const marker1 = path.join(baseDir1, shortId, '.project_root');
    const marker2 = path.join(baseDir2, shortId, '.project_root');
    expect(normalizePath(fs.readFileSync(marker1, 'utf8'))).toBe(projectPath);
    expect(normalizePath(fs.readFileSync(marker2, 'utf8'))).toBe(projectPath);
  });
  it('recovers mapping from disk if registry is missing it', async () => {
    // 1. Setup a project with ownership markers
    const projectPath = normalizePath(path.join(tempDir, 'project-x'));
    const slug = 'project-x';
    const slugDir = path.join(baseDir1, slug);
    fs.mkdirSync(slugDir, { recursive: true });
    fs.writeFileSync(path.join(slugDir, '.project_root'), projectPath);
    // 2. Initialize registry (it has no projects.json)
    const registry = new ProjectRegistry(registryPath, [baseDir1, baseDir2]);
    await registry.initialize();
    // 3. getShortId should find it from disk
    const shortId = await registry.getShortId(projectPath);
    expect(shortId).toBe(slug);
    // 4. It should have populated the markers in other base dirs too
    const marker2 = path.join(baseDir2, slug, '.project_root');
    expect(normalizePath(fs.readFileSync(marker2, 'utf8'))).toBe(projectPath);
  });
  it('handles collisions if a slug is taken on disk by another project', async () => {
    // 1. project-y takes 'gemini' on disk
    const projectY = normalizePath(path.join(tempDir, 'project-y'));
    const slug = 'gemini';
    const slugDir = path.join(baseDir1, slug);
    fs.mkdirSync(slugDir, { recursive: true });
    fs.writeFileSync(path.join(slugDir, '.project_root'), projectY);
    // 2. project-z tries to get shortId for 'gemini'
    const registry = new ProjectRegistry(registryPath, [baseDir1]);
    await registry.initialize();
    const projectZ = normalizePath(path.join(tempDir, 'gemini'));
    const shortId = await registry.getShortId(projectZ);
    // 3. It should avoid 'gemini' and pick 'gemini-1' (or similar)
    expect(shortId).not.toBe('gemini');
    expect(shortId).toBe('gemini-1');
  });
  it('invalidates registry mapping if disk ownership changed', async () => {
    // 1. Registry thinks my-project owns 'my-project'
    const projectPath = normalizePath(path.join(tempDir, 'my-project'));
    fs.writeFileSync(
      registryPath,
      JSON.stringify({
        projects: {
          [projectPath]: 'my-project',
        },
      }),
    );
    // 2. But disk says project-b owns 'my-project'
    const slugDir = path.join(baseDir1, 'my-project');
    fs.mkdirSync(slugDir, { recursive: true });
    fs.writeFileSync(
      path.join(slugDir, '.project_root'),
      normalizePath(path.join(tempDir, 'project-b')),
    );
    // 3. my-project asks for its ID
    const registry = new ProjectRegistry(registryPath, [baseDir1]);
    await registry.initialize();
    const id = await registry.getShortId(projectPath);
    // 4. It should NOT get 'my-project' because it's owned by project-b on disk.
    // It should get 'my-project-1' instead.
    expect(id).not.toBe('my-project');
    expect(id).toBe('my-project-1');
  });
  it('repairs missing ownership markers in other base directories', async () => {
    const projectPath = normalizePath(path.join(tempDir, 'project-repair'));
    const slug = 'repair-me';
    // 1. Marker exists in base1 but NOT in base2
    const slugDir1 = path.join(baseDir1, slug);
    fs.mkdirSync(slugDir1, { recursive: true });
    fs.writeFileSync(path.join(slugDir1, '.project_root'), projectPath);
    const registry = new ProjectRegistry(registryPath, [baseDir1, baseDir2]);
    await registry.initialize();
    // 2. getShortId should find it and repair base2
    const shortId = await registry.getShortId(projectPath);
    expect(shortId).toBe(slug);
    const marker2 = path.join(baseDir2, slug, '.project_root');
    expect(fs.existsSync(marker2)).toBe(true);
    expect(normalizePath(fs.readFileSync(marker2, 'utf8'))).toBe(projectPath);
  });
  it('heals if both markers are missing but registry mapping exists', async () => {
    const projectPath = normalizePath(path.join(tempDir, 'project-heal-both'));
    const slug = 'heal-both';
    // 1. Registry has the mapping
    fs.writeFileSync(
      registryPath,
      JSON.stringify({
        projects: {
          [projectPath]: slug,
        },
      }),
    );
    // 2. No markers on disk
    const registry = new ProjectRegistry(registryPath, [baseDir1, baseDir2]);
    await registry.initialize();
    // 3. getShortId should recreate them
    const id = await registry.getShortId(projectPath);
    expect(id).toBe(slug);
    expect(fs.existsSync(path.join(baseDir1, slug, '.project_root'))).toBe(
      true,
    );
    expect(fs.existsSync(path.join(baseDir2, slug, '.project_root'))).toBe(
      true,
    );
    expect(
      normalizePath(
        fs.readFileSync(path.join(baseDir1, slug, '.project_root'), 'utf8'),
      ),
    ).toBe(projectPath);
  });
  it('handles corrupted (unreadable) ownership markers by picking a new slug', async () => {
    const projectPath = normalizePath(path.join(tempDir, 'corrupt-slug'));
    const slug = 'corrupt-slug';
    // 1. Marker exists but is owned by someone else
    const slugDir = path.join(baseDir1, slug);
    fs.mkdirSync(slugDir, { recursive: true });
    fs.writeFileSync(
      path.join(slugDir, '.project_root'),
      normalizePath(path.join(tempDir, 'something-else')),
    );
    // 2. Registry also thinks we own it
    fs.writeFileSync(
      registryPath,
      JSON.stringify({
        projects: {
          [projectPath]: slug,
        },
      }),
    );
    const registry = new ProjectRegistry(registryPath, [baseDir1]);
    await registry.initialize();
    // 3. It should see the collision/corruption and pick a new one
    const id = await registry.getShortId(projectPath);
    expect(id).toBe(`${slug}-1`);
  });
  it('throws on lock timeout', async () => {
    const registry = new ProjectRegistry(registryPath);
    await registry.initialize();
    // Simulate proper-lockfile giving up after its retry budget.
    vi.mocked(lock).mockRejectedValue(new Error('Lock timeout'));
    await expect(registry.getShortId('/foo')).rejects.toThrow('Lock timeout');
    expect(lock).toHaveBeenCalledWith(
      registryPath,
      expect.objectContaining({
        retries: expect.any(Object),
      }),
    );
  });
  it('throws if not initialized', async () => {
    const registry = new ProjectRegistry(registryPath);
    await expect(registry.getShortId('/foo')).rejects.toThrow(
      'ProjectRegistry must be initialized before use',
    );
  });
});
+320
View File
@@ -0,0 +1,320 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import * as fs from 'node:fs';
import * as path from 'node:path';
import * as os from 'node:os';
import { lock } from 'proper-lockfile';
import { debugLogger } from '../utils/debugLogger.js';
/** On-disk shape of the registry file: maps normalized absolute project path -> short slug. */
export interface RegistryData {
  projects: Record<string, string>;
}
// Marker file written inside each slug directory, recording which project owns that slug.
const PROJECT_ROOT_FILE = '.project_root';
// Total time budget for acquiring the registry file lock before giving up.
const LOCK_TIMEOUT_MS = 10000;
// Delay between individual lock acquisition attempts.
const LOCK_RETRY_DELAY_MS = 100;
/**
 * Manages a mapping between absolute project paths and short, human-readable identifiers.
 * This helps reduce context bloat and makes temporary directories easier to work with.
 *
 * The mapping is persisted in a JSON registry file and cross-checked against
 * `.project_root` ownership markers written inside each slug directory under the
 * configured base dirs, so the registry can heal after deletion or drift.
 */
export class ProjectRegistry {
  private readonly registryPath: string;
  private readonly baseDirs: string[];
  private data: RegistryData | undefined;
  private initPromise: Promise<void> | undefined;

  constructor(registryPath: string, baseDirs: string[] = []) {
    this.registryPath = registryPath;
    this.baseDirs = baseDirs;
  }

  /**
   * Initializes the registry by loading data from disk.
   * Idempotent: repeated or concurrent calls share a single in-flight promise.
   */
  async initialize(): Promise<void> {
    if (this.initPromise) {
      return this.initPromise;
    }
    this.initPromise = (async () => {
      if (this.data) {
        return;
      }
      this.data = await this.loadData();
    })();
    return this.initPromise;
  }

  /**
   * Reads the registry file. Returns an empty registry when the file is
   * missing, unreadable, or does not have the expected shape.
   */
  private async loadData(): Promise<RegistryData> {
    if (!fs.existsSync(this.registryPath)) {
      return { projects: {} };
    }
    try {
      const content = await fs.promises.readFile(this.registryPath, 'utf8');
      const parsed: unknown = JSON.parse(content);
      // JSON.parse accepts any valid JSON (null, arrays, objects missing the
      // `projects` map). Validate the shape here so getShortId never crashes
      // dereferencing `currentData.projects` on a structurally-corrupt file.
      if (
        typeof parsed === 'object' &&
        parsed !== null &&
        !Array.isArray(parsed) &&
        typeof (parsed as { projects?: unknown }).projects === 'object' &&
        (parsed as { projects?: unknown }).projects !== null
      ) {
        return parsed as RegistryData;
      }
      debugLogger.debug('Registry file has an unexpected shape; starting fresh');
      return { projects: {} };
    } catch (e) {
      debugLogger.debug('Failed to load registry: ', e);
      // If the registry is corrupted, we'll start fresh to avoid blocking the CLI
      return { projects: {} };
    }
  }

  /**
   * Resolves a path to an absolute form; lowercases on Windows so path
   * comparisons are case-insensitive there.
   */
  private normalizePath(projectPath: string): string {
    let resolved = path.resolve(projectPath);
    if (os.platform() === 'win32') {
      resolved = resolved.toLowerCase();
    }
    return resolved;
  }

  /**
   * Atomically persists the registry (write to a `.tmp` sibling, then rename).
   * Failures are logged rather than thrown so saving never blocks the CLI.
   */
  private async save(data: RegistryData): Promise<void> {
    const dir = path.dirname(this.registryPath);
    if (!fs.existsSync(dir)) {
      await fs.promises.mkdir(dir, { recursive: true });
    }
    try {
      const content = JSON.stringify(data, null, 2);
      const tmpPath = `${this.registryPath}.tmp`;
      await fs.promises.writeFile(tmpPath, content, 'utf8');
      await fs.promises.rename(tmpPath, this.registryPath);
    } catch (error) {
      debugLogger.error(
        `Failed to save project registry to ${this.registryPath}:`,
        error,
      );
    }
  }

  /**
   * Returns a short identifier for the given project path.
   * If the project is not already in the registry, a new identifier is generated and saved.
   *
   * @throws if initialize() has not completed, or if the registry file lock
   *   cannot be acquired within the retry budget.
   */
  async getShortId(projectPath: string): Promise<string> {
    if (!this.data) {
      throw new Error('ProjectRegistry must be initialized before use');
    }
    const normalizedPath = this.normalizePath(projectPath);
    // Ensure directory exists so we can create a lock file
    const dir = path.dirname(this.registryPath);
    if (!fs.existsSync(dir)) {
      await fs.promises.mkdir(dir, { recursive: true });
    }
    // Ensure the registry file exists so proper-lockfile can lock it
    if (!fs.existsSync(this.registryPath)) {
      await this.save({ projects: {} });
    }
    // Use proper-lockfile to prevent racy updates
    const release = await lock(this.registryPath, {
      retries: {
        retries: Math.floor(LOCK_TIMEOUT_MS / LOCK_RETRY_DELAY_MS),
        minTimeout: LOCK_RETRY_DELAY_MS,
      },
    });
    try {
      // Re-load data under lock to get the latest state
      const currentData = await this.loadData();
      this.data = currentData;
      let shortId: string | undefined = currentData.projects[normalizedPath];
      // If we have a mapping, verify it against the folders on disk
      if (shortId) {
        if (await this.verifySlugOwnership(shortId, normalizedPath)) {
          // HEAL: If it passed verification but markers are missing (e.g. new base dir or deleted marker), recreate them.
          await this.ensureOwnershipMarkers(shortId, normalizedPath);
          return shortId;
        }
        // If verification fails, it means the registry is out of sync or someone else took it.
        // We'll remove the mapping and find/generate a new one.
        delete currentData.projects[normalizedPath];
      }
      // Try to find if this project already has folders assigned that we didn't know about
      shortId = await this.findExistingSlugForPath(normalizedPath);
      if (!shortId) {
        // Generate a new one
        shortId = await this.claimNewSlug(normalizedPath, currentData.projects);
      }
      currentData.projects[normalizedPath] = shortId;
      await this.save(currentData);
      return shortId;
    } finally {
      await release();
    }
  }

  /**
   * Returns true when no base dir's marker for `slug` names a different
   * project. A missing marker counts as unowned (passes); an unreadable
   * marker is treated pessimistically as someone else's (fails).
   */
  private async verifySlugOwnership(
    slug: string,
    projectPath: string,
  ): Promise<boolean> {
    if (this.baseDirs.length === 0) {
      return true; // Nothing to verify against
    }
    for (const baseDir of this.baseDirs) {
      const markerPath = path.join(baseDir, slug, PROJECT_ROOT_FILE);
      if (fs.existsSync(markerPath)) {
        try {
          const owner = (await fs.promises.readFile(markerPath, 'utf8')).trim();
          if (this.normalizePath(owner) !== this.normalizePath(projectPath)) {
            return false;
          }
        } catch (e) {
          debugLogger.debug(`Failed to read ownership marker ${markerPath}:`, e);
          // If we can't read it, assume it's not ours or corrupted.
          return false;
        }
      }
    }
    return true;
  }

  /**
   * Scans every base dir for a slug whose ownership marker already points at
   * this project, recovering mappings the registry file lost. Repairs markers
   * in all base dirs when a match is found.
   */
  private async findExistingSlugForPath(
    projectPath: string,
  ): Promise<string | undefined> {
    if (this.baseDirs.length === 0) {
      return undefined;
    }
    const normalizedTarget = this.normalizePath(projectPath);
    // Scan all base dirs to see if any slug already belongs to this project
    for (const baseDir of this.baseDirs) {
      if (!fs.existsSync(baseDir)) {
        continue;
      }
      try {
        const candidates = await fs.promises.readdir(baseDir);
        for (const candidate of candidates) {
          const markerPath = path.join(baseDir, candidate, PROJECT_ROOT_FILE);
          if (fs.existsSync(markerPath)) {
            const owner = (
              await fs.promises.readFile(markerPath, 'utf8')
            ).trim();
            if (this.normalizePath(owner) === normalizedTarget) {
              // Found it! Ensure all base dirs have the marker
              await this.ensureOwnershipMarkers(candidate, normalizedTarget);
              return candidate;
            }
          }
        }
      } catch (e) {
        debugLogger.debug(`Failed to scan base dir ${baseDir}:`, e);
      }
    }
    return undefined;
  }

  /**
   * Generates a fresh slug from the project basename, appending `-1`, `-2`, …
   * until a candidate is free both in the registry and on disk, then claims
   * it by writing ownership markers.
   */
  private async claimNewSlug(
    projectPath: string,
    existingMappings: Record<string, string>,
  ): Promise<string> {
    const baseName = path.basename(projectPath) || 'project';
    const slug = this.slugify(baseName);
    let counter = 0;
    const existingIds = new Set(Object.values(existingMappings));
    while (true) {
      const candidate = counter === 0 ? slug : `${slug}-${counter}`;
      counter++;
      // Check if taken in registry
      if (existingIds.has(candidate)) {
        continue;
      }
      // Check if taken on disk
      let diskCollision = false;
      for (const baseDir of this.baseDirs) {
        const markerPath = path.join(baseDir, candidate, PROJECT_ROOT_FILE);
        if (fs.existsSync(markerPath)) {
          try {
            const owner = (
              await fs.promises.readFile(markerPath, 'utf8')
            ).trim();
            if (this.normalizePath(owner) !== this.normalizePath(projectPath)) {
              diskCollision = true;
              break;
            }
          } catch (_e) {
            // If we can't read it, assume it's someone else's to be safe
            diskCollision = true;
            break;
          }
        }
      }
      if (diskCollision) {
        continue;
      }
      // Try to claim it
      try {
        await this.ensureOwnershipMarkers(candidate, projectPath);
        return candidate;
      } catch (_e) {
        // Someone might have claimed it between our check and our write.
        // Try next candidate.
        continue;
      }
    }
  }

  /**
   * Creates the slug directory and ownership marker in every base dir.
   * @throws if a marker there already names a different project (collision).
   */
  private async ensureOwnershipMarkers(
    slug: string,
    projectPath: string,
  ): Promise<void> {
    const normalizedProject = this.normalizePath(projectPath);
    for (const baseDir of this.baseDirs) {
      const slugDir = path.join(baseDir, slug);
      if (!fs.existsSync(slugDir)) {
        await fs.promises.mkdir(slugDir, { recursive: true });
      }
      const markerPath = path.join(slugDir, PROJECT_ROOT_FILE);
      if (fs.existsSync(markerPath)) {
        const owner = (await fs.promises.readFile(markerPath, 'utf8')).trim();
        if (this.normalizePath(owner) === normalizedProject) {
          continue;
        }
        // Collision!
        throw new Error(`Slug ${slug} is already owned by ${owner}`);
      }
      // Use flag: 'wx' to ensure atomic creation
      await fs.promises.writeFile(markerPath, normalizedProject, {
        encoding: 'utf8',
        flag: 'wx',
      });
    }
  }

  /**
   * Lowercases and collapses runs of non-alphanumerics to single dashes,
   * trimming leading/trailing dashes; falls back to 'project' when empty.
   */
  private slugify(text: string): string {
    return (
      text
        .toLowerCase()
        .replace(/[^a-z0-9]/g, '-')
        .replace(/-+/g, '-')
        .replace(/^-|-$/g, '') || 'project'
    );
  }
}
+54 -2
View File
@@ -4,7 +4,12 @@
* SPDX-License-Identifier: Apache-2.0 * SPDX-License-Identifier: Apache-2.0
*/ */
import { describe, it, expect, vi, afterEach } from 'vitest'; import { beforeEach, describe, it, expect, vi, afterEach } from 'vitest';
vi.unmock('./storage.js');
vi.unmock('./projectRegistry.js');
vi.unmock('./storageMigration.js');
import * as os from 'node:os'; import * as os from 'node:os';
import * as path from 'node:path'; import * as path from 'node:path';
@@ -18,6 +23,52 @@ vi.mock('fs', async (importOriginal) => {
import { Storage } from './storage.js'; import { Storage } from './storage.js';
import { GEMINI_DIR, homedir } from '../utils/paths.js'; import { GEMINI_DIR, homedir } from '../utils/paths.js';
import { ProjectRegistry } from './projectRegistry.js';
import { StorageMigration } from './storageMigration.js';
const PROJECT_SLUG = 'project-slug';
vi.mock('./projectRegistry.js');
vi.mock('./storageMigration.js');
describe('Storage initialize', () => {
const projectRoot = '/tmp/project';
let storage: Storage;
beforeEach(() => {
ProjectRegistry.prototype.initialize = vi.fn().mockResolvedValue(undefined);
ProjectRegistry.prototype.getShortId = vi
.fn()
.mockReturnValue(PROJECT_SLUG);
storage = new Storage(projectRoot);
vi.clearAllMocks();
// Mock StorageMigration.migrateDirectory
vi.mocked(StorageMigration.migrateDirectory).mockResolvedValue(undefined);
});
it('sets up the registry and performs migration if `getProjectTempDir` is called', async () => {
await storage.initialize();
expect(storage.getProjectTempDir()).toBe(
path.join(os.homedir(), GEMINI_DIR, 'tmp', PROJECT_SLUG),
);
// Verify registry initialization
expect(ProjectRegistry).toHaveBeenCalled();
expect(vi.mocked(ProjectRegistry).prototype.initialize).toHaveBeenCalled();
expect(
vi.mocked(ProjectRegistry).prototype.getShortId,
).toHaveBeenCalledWith(projectRoot);
// Verify migration calls
const shortId = 'project-slug';
// We can't easily get the hash here without repeating logic, but we can verify it's called twice
expect(StorageMigration.migrateDirectory).toHaveBeenCalledTimes(2);
// Verify identifier is set by checking a path
expect(storage.getProjectTempDir()).toContain(shortId);
});
});
vi.mock('../utils/paths.js', async (importOriginal) => { vi.mock('../utils/paths.js', async (importOriginal) => {
const actual = await importOriginal<typeof import('../utils/paths.js')>(); const actual = await importOriginal<typeof import('../utils/paths.js')>();
@@ -103,7 +154,8 @@ describe('Storage additional helpers', () => {
expect(Storage.getGlobalBinDir()).toBe(expected); expect(Storage.getGlobalBinDir()).toBe(expected);
}); });
it('getProjectTempPlansDir returns ~/.gemini/tmp/<hash>/plans', () => { it('getProjectTempPlansDir returns ~/.gemini/tmp/<identifier>/plans', async () => {
await storage.initialize();
const tempDir = storage.getProjectTempDir(); const tempDir = storage.getProjectTempDir();
const expected = path.join(tempDir, 'plans'); const expected = path.join(tempDir, 'plans');
expect(storage.getProjectTempPlansDir()).toBe(expected); expect(storage.getProjectTempPlansDir()).toBe(expected);
+66 -5
View File
@@ -9,6 +9,8 @@ import * as os from 'node:os';
import * as crypto from 'node:crypto'; import * as crypto from 'node:crypto';
import * as fs from 'node:fs'; import * as fs from 'node:fs';
import { GEMINI_DIR, homedir } from '../utils/paths.js'; import { GEMINI_DIR, homedir } from '../utils/paths.js';
import { ProjectRegistry } from './projectRegistry.js';
import { StorageMigration } from './storageMigration.js';
export const GOOGLE_ACCOUNTS_FILENAME = 'google_accounts.json'; export const GOOGLE_ACCOUNTS_FILENAME = 'google_accounts.json';
export const OAUTH_FILE = 'oauth_creds.json'; export const OAUTH_FILE = 'oauth_creds.json';
@@ -18,6 +20,8 @@ const AGENTS_DIR_NAME = '.agents';
export class Storage { export class Storage {
private readonly targetDir: string; private readonly targetDir: string;
private projectIdentifier: string | undefined;
private initPromise: Promise<void> | undefined;
constructor(targetDir: string) { constructor(targetDir: string) {
this.targetDir = targetDir; this.targetDir = targetDir;
@@ -125,9 +129,9 @@ export class Storage {
} }
getProjectTempDir(): string { getProjectTempDir(): string {
const hash = this.getFilePathHash(this.getProjectRoot()); const identifier = this.getProjectIdentifier();
const tempDir = Storage.getGlobalTempDir(); const tempDir = Storage.getGlobalTempDir();
return path.join(tempDir, hash); return path.join(tempDir, identifier);
} }
ensureProjectTempDirExists(): void { ensureProjectTempDirExists(): void {
@@ -146,10 +150,67 @@ export class Storage {
return crypto.createHash('sha256').update(filePath).digest('hex'); return crypto.createHash('sha256').update(filePath).digest('hex');
} }
getHistoryDir(): string { private getProjectIdentifier(): string {
const hash = this.getFilePathHash(this.getProjectRoot()); if (!this.projectIdentifier) {
throw new Error('Storage must be initialized before use');
}
return this.projectIdentifier;
}
/**
* Initializes storage by setting up the project registry and performing migrations.
*/
async initialize(): Promise<void> {
if (this.initPromise) {
return this.initPromise;
}
this.initPromise = (async () => {
if (this.projectIdentifier) {
return;
}
const registryPath = path.join(
Storage.getGlobalGeminiDir(),
'projects.json',
);
const registry = new ProjectRegistry(registryPath, [
Storage.getGlobalTempDir(),
path.join(Storage.getGlobalGeminiDir(), 'history'),
]);
await registry.initialize();
this.projectIdentifier = await registry.getShortId(this.getProjectRoot());
await this.performMigration();
})();
return this.initPromise;
}
/**
* Performs migration of legacy hash-based directories to the new slug-based format.
* This is called internally by initialize().
*/
private async performMigration(): Promise<void> {
const shortId = this.getProjectIdentifier();
const oldHash = this.getFilePathHash(this.getProjectRoot());
// Migrate Temp Dir
const newTempDir = path.join(Storage.getGlobalTempDir(), shortId);
const oldTempDir = path.join(Storage.getGlobalTempDir(), oldHash);
await StorageMigration.migrateDirectory(oldTempDir, newTempDir);
// Migrate History Dir
const historyDir = path.join(Storage.getGlobalGeminiDir(), 'history'); const historyDir = path.join(Storage.getGlobalGeminiDir(), 'history');
return path.join(historyDir, hash); const newHistoryDir = path.join(historyDir, shortId);
const oldHistoryDir = path.join(historyDir, oldHash);
await StorageMigration.migrateDirectory(oldHistoryDir, newHistoryDir);
}
getHistoryDir(): string {
const identifier = this.getProjectIdentifier();
const historyDir = path.join(Storage.getGlobalGeminiDir(), 'history');
return path.join(historyDir, identifier);
} }
getWorkspaceSettingsPath(): string { getWorkspaceSettingsPath(): string {
@@ -0,0 +1,77 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
vi.unmock('./storageMigration.js');
import * as fs from 'node:fs';
import * as path from 'node:path';
import * as os from 'node:os';
import { StorageMigration } from './storageMigration.js';
describe('StorageMigration', () => {
  let workDir: string;

  beforeEach(() => {
    // Fresh scratch directory per test so cases cannot leak state.
    workDir = fs.mkdtempSync(path.join(os.tmpdir(), 'gemini-migration-test-'));
  });

  afterEach(() => {
    fs.rmSync(workDir, { recursive: true, force: true });
    vi.restoreAllMocks();
  });

  it('migrates a directory from old to new path (non-destructively)', async () => {
    const source = path.join(workDir, 'old-hash');
    const destination = path.join(workDir, 'new-slug');
    fs.mkdirSync(source);
    fs.writeFileSync(path.join(source, 'test.txt'), 'hello');

    await StorageMigration.migrateDirectory(source, destination);

    // Contents are copied, and the source directory is left in place.
    expect(fs.existsSync(destination)).toBe(true);
    expect(fs.existsSync(source)).toBe(true); // Should still exist
    expect(fs.readFileSync(path.join(destination, 'test.txt'), 'utf8')).toBe(
      'hello',
    );
  });

  it('does nothing if old path does not exist', async () => {
    const source = path.join(workDir, 'non-existent');
    const destination = path.join(workDir, 'new-slug');

    await StorageMigration.migrateDirectory(source, destination);

    expect(fs.existsSync(destination)).toBe(false);
  });

  it('does nothing if new path already exists', async () => {
    const source = path.join(workDir, 'old-hash');
    const destination = path.join(workDir, 'new-slug');
    fs.mkdirSync(source);
    fs.mkdirSync(destination);
    fs.writeFileSync(path.join(source, 'old.txt'), 'old');
    fs.writeFileSync(path.join(destination, 'new.txt'), 'new');

    await StorageMigration.migrateDirectory(source, destination);

    // An existing destination is never overwritten or merged into.
    expect(fs.existsSync(source)).toBe(true);
    expect(fs.existsSync(path.join(destination, 'new.txt'))).toBe(true);
    expect(fs.existsSync(path.join(destination, 'old.txt'))).toBe(false);
  });

  it('creates parent directory for new path if it does not exist', async () => {
    const source = path.join(workDir, 'old-hash');
    const destination = path.join(workDir, 'sub', 'new-slug');
    fs.mkdirSync(source);

    await StorageMigration.migrateDirectory(source, destination);

    expect(fs.existsSync(destination)).toBe(true);
    expect(fs.existsSync(source)).toBe(true); // Should still exist
  });
});
@@ -0,0 +1,44 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import * as fs from 'node:fs';
import * as path from 'node:path';
import { debugLogger } from '../utils/debugLogger.js';
/**
* Migration utility to move data from old hash-based directories to new slug-based directories.
*/
export class StorageMigration {
  /**
   * Copies a directory from an old path to a new path if the old one exists
   * and the new one doesn't. The old directory is deliberately left in place
   * (non-destructive), so data is never lost if the migration is interrupted
   * or an older version still reads the hash-based location.
   *
   * Failures are swallowed and logged at debug level: migration is
   * best-effort and must never block startup.
   *
   * @param oldPath The old directory path (hash-based).
   * @param newPath The new directory path (slug-based).
   */
  static async migrateDirectory(
    oldPath: string,
    newPath: string,
  ): Promise<void> {
    try {
      // If the new path already exists, we consider migration done or skipped
      // to avoid overwriting. If the old path doesn't exist, there's nothing
      // to migrate. (Best-effort existence check; a concurrent writer could
      // still race between the check and the copy.)
      if (fs.existsSync(newPath) || !fs.existsSync(oldPath)) {
        return;
      }

      // Ensure the parent directory of the new path exists.
      const parentDir = path.dirname(newPath);
      await fs.promises.mkdir(parentDir, { recursive: true });

      // Copy rather than rename: non-destructive, and handles cross-device
      // boundaries that fs.rename cannot.
      await fs.promises.cp(oldPath, newPath, { recursive: true });
    } catch (e) {
      // Fix: the operation is a copy, not a move — log message said "move",
      // which misrepresents what failed (and wrongly implied data removal).
      debugLogger.debug(
        `Storage Migration: Failed to copy ${oldPath} to ${newPath}:`,
        e,
      );
    }
  }
}
+7 -5
View File
@@ -25,19 +25,21 @@ import { Storage } from '../config/storage.js';
import { promises as fs, existsSync } from 'node:fs'; import { promises as fs, existsSync } from 'node:fs';
import path from 'node:path'; import path from 'node:path';
import type { Content } from '@google/genai'; import type { Content } from '@google/genai';
import crypto from 'node:crypto';
import os from 'node:os'; import os from 'node:os';
import { GEMINI_DIR } from '../utils/paths.js'; import { GEMINI_DIR } from '../utils/paths.js';
import { debugLogger } from '../utils/debugLogger.js'; import { debugLogger } from '../utils/debugLogger.js';
const PROJECT_SLUG = 'project-slug';
const TMP_DIR_NAME = 'tmp'; const TMP_DIR_NAME = 'tmp';
const LOG_FILE_NAME = 'logs.json'; const LOG_FILE_NAME = 'logs.json';
const CHECKPOINT_FILE_NAME = 'checkpoint.json'; const CHECKPOINT_FILE_NAME = 'checkpoint.json';
const projectDir = process.cwd(); const TEST_GEMINI_DIR = path.join(
const hash = crypto.createHash('sha256').update(projectDir).digest('hex'); os.homedir(),
const TEST_GEMINI_DIR = path.join(os.homedir(), GEMINI_DIR, TMP_DIR_NAME, hash); GEMINI_DIR,
TMP_DIR_NAME,
PROJECT_SLUG,
);
const TEST_LOG_FILE_PATH = path.join(TEST_GEMINI_DIR, LOG_FILE_NAME); const TEST_LOG_FILE_PATH = path.join(TEST_GEMINI_DIR, LOG_FILE_NAME);
const TEST_CHECKPOINT_FILE_PATH = path.join( const TEST_CHECKPOINT_FILE_PATH = path.join(
+1
View File
@@ -141,6 +141,7 @@ export class Logger {
return; return;
} }
await this.storage.initialize();
this.geminiDir = this.storage.getProjectTempDir(); this.geminiDir = this.storage.getProjectTempDir();
this.logFilePath = path.join(this.geminiDir, LOG_FILE_NAME); this.logFilePath = path.join(this.geminiDir, LOG_FILE_NAME);
+2
View File
@@ -12,6 +12,8 @@ import type { PolicySettings } from './types.js';
import { ApprovalMode, PolicyDecision, InProcessCheckerType } from './types.js'; import { ApprovalMode, PolicyDecision, InProcessCheckerType } from './types.js';
import { isDirectorySecure } from '../utils/security.js'; import { isDirectorySecure } from '../utils/security.js';
vi.unmock('../config/storage.js');
vi.mock('../utils/security.js', () => ({ vi.mock('../utils/security.js', () => ({
isDirectorySecure: vi.fn().mockResolvedValue({ secure: true }), isDirectorySecure: vi.fn().mockResolvedValue({ secure: true }),
})); }));
+5 -10
View File
@@ -18,13 +18,11 @@ import { Storage } from '../config/storage.js';
import * as path from 'node:path'; import * as path from 'node:path';
import * as fs from 'node:fs/promises'; import * as fs from 'node:fs/promises';
import * as os from 'node:os'; import * as os from 'node:os';
import { import { GEMINI_DIR, homedir as pathsHomedir } from '../utils/paths.js';
getProjectHash,
GEMINI_DIR,
homedir as pathsHomedir,
} from '../utils/paths.js';
import { spawnAsync } from '../utils/shell-utils.js'; import { spawnAsync } from '../utils/shell-utils.js';
const PROJECT_SLUG = 'project-slug';
vi.mock('../utils/shell-utils.js', () => ({ vi.mock('../utils/shell-utils.js', () => ({
spawnAsync: vi.fn(), spawnAsync: vi.fn(),
})); }));
@@ -85,7 +83,6 @@ describe('GitService', () => {
let testRootDir: string; let testRootDir: string;
let projectRoot: string; let projectRoot: string;
let homedir: string; let homedir: string;
let hash: string;
let storage: Storage; let storage: Storage;
beforeEach(async () => { beforeEach(async () => {
@@ -95,8 +92,6 @@ describe('GitService', () => {
await fs.mkdir(projectRoot, { recursive: true }); await fs.mkdir(projectRoot, { recursive: true });
await fs.mkdir(homedir, { recursive: true }); await fs.mkdir(homedir, { recursive: true });
hash = getProjectHash(projectRoot);
vi.clearAllMocks(); vi.clearAllMocks();
hoistedIsGitRepositoryMock.mockReturnValue(true); hoistedIsGitRepositoryMock.mockReturnValue(true);
(spawnAsync as Mock).mockResolvedValue({ (spawnAsync as Mock).mockResolvedValue({
@@ -181,8 +176,8 @@ describe('GitService', () => {
let repoDir: string; let repoDir: string;
let gitConfigPath: string; let gitConfigPath: string;
beforeEach(() => { beforeEach(async () => {
repoDir = path.join(homedir, GEMINI_DIR, 'history', hash); repoDir = path.join(homedir, GEMINI_DIR, 'history', PROJECT_SLUG);
gitConfigPath = path.join(repoDir, '.gitconfig'); gitConfigPath = path.join(repoDir, '.gitconfig');
}); });
+1
View File
@@ -33,6 +33,7 @@ export class GitService {
'Checkpointing is enabled, but Git is not installed. Please install Git or disable checkpointing to continue.', 'Checkpointing is enabled, but Git is not installed. Please install Git or disable checkpointing to continue.',
); );
} }
await this.storage.initialize();
try { try {
await this.setupShadowGitRepository(); await this.setupShadowGitRepository();
} catch (error) { } catch (error) {
+36
View File
@@ -10,6 +10,42 @@ if (process.env.NO_COLOR !== undefined) {
} }
import { setSimulate429 } from './src/utils/testUtils.js'; import { setSimulate429 } from './src/utils/testUtils.js';
import { vi } from 'vitest';
// Disable 429 simulation globally for all tests // Disable 429 simulation globally for all tests
setSimulate429(false); setSimulate429(false);
// Default mocks for Storage and ProjectRegistry to prevent disk access in most tests.
// These can be overridden in specific tests using vi.unmock().
// NOTE: vi.mock factories are hoisted by vitest and run before test modules
// import the mocked module, so this applies to every test in the run.
vi.mock('./src/config/projectRegistry.js', async (importOriginal) => {
  const actual =
    await importOriginal<typeof import('./src/config/projectRegistry.js')>();
  // Patch the real class's prototype in place (instead of returning a stub
  // module) so instanceof checks and any unmocked members keep working.
  // initialize() becomes a no-op: no registry file is read or written.
  actual.ProjectRegistry.prototype.initialize = vi.fn(() =>
    Promise.resolve(undefined),
  );
  // Every project resolves to the same fixed slug, giving tests
  // deterministic temp/history paths without touching disk.
  actual.ProjectRegistry.prototype.getShortId = vi.fn(() =>
    Promise.resolve('project-slug'),
  );
  return actual;
});
// Disable the hash->slug directory migration by default: it copies real
// directories on disk, which tests must not do implicitly.
vi.mock('./src/config/storageMigration.js', async (importOriginal) => {
  const actual =
    await importOriginal<typeof import('./src/config/storageMigration.js')>();
  actual.StorageMigration.migrateDirectory = vi.fn(() =>
    Promise.resolve(undefined),
  );
  return actual;
});
// Make Storage usable without calling initialize() for real: initialize()
// becomes a no-op and the private identifier getter returns the fixed slug,
// matching the ProjectRegistry mock above.
vi.mock('./src/config/storage.js', async (importOriginal) => {
  const actual =
    await importOriginal<typeof import('./src/config/storage.js')>();
  actual.Storage.prototype.initialize = vi.fn(() => Promise.resolve(undefined));
  // getProjectIdentifier is private on Storage, so the prototype must be
  // widened to assign the stub.
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  (actual.Storage.prototype as any).getProjectIdentifier = vi.fn(
    () => 'project-slug',
  );
  return actual;
});