Shorten temp directory (#17901)

This commit is contained in:
joshualitt
2026-02-06 08:10:17 -08:00
committed by GitHub
parent 30354892b3
commit 6fb3b09003
24 changed files with 989 additions and 27 deletions
+2
View File
@@ -895,6 +895,8 @@ export class Config {
}
this.initialized = true;
await this.storage.initialize();
// Add pending directories to workspace context
for (const dir of this.pendingIncludeDirectories) {
this.workspaceContext.addDirectory(dir);
@@ -0,0 +1,303 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
vi.unmock('./projectRegistry.js');
import * as fs from 'node:fs';
import * as path from 'node:path';
import * as os from 'node:os';
import { ProjectRegistry } from './projectRegistry.js';
import { lock } from 'proper-lockfile';
vi.mock('proper-lockfile');
// End-to-end tests for ProjectRegistry against a real on-disk temp tree.
// Only proper-lockfile is mocked; all registry/marker I/O is real.
describe('ProjectRegistry', () => {
  let tempDir: string; // scratch root, removed in afterEach
  let registryPath: string; // projects.json location
  let baseDir1: string; // first marker base directory
  let baseDir2: string; // second marker base directory

  // Mirrors ProjectRegistry's internal normalization (absolute path,
  // lowercased on Windows) so expectations match stored keys/markers.
  function normalizePath(p: string): string {
    let resolved = path.resolve(p);
    if (os.platform() === 'win32') {
      resolved = resolved.toLowerCase();
    }
    return resolved;
  }

  beforeEach(() => {
    tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'gemini-registry-test-'));
    registryPath = path.join(tempDir, 'projects.json');
    baseDir1 = path.join(tempDir, 'base1');
    baseDir2 = path.join(tempDir, 'base2');
    fs.mkdirSync(baseDir1);
    fs.mkdirSync(baseDir2);
    // lock() resolves to a release function; default both to success.
    vi.mocked(lock).mockResolvedValue(vi.fn().mockResolvedValue(undefined));
  });

  afterEach(() => {
    fs.rmSync(tempDir, { recursive: true, force: true });
    vi.clearAllMocks();
  });

  it('generates a short ID from the basename', async () => {
    const registry = new ProjectRegistry(registryPath);
    await registry.initialize();
    const projectPath = path.join(tempDir, 'my-project');
    const shortId = await registry.getShortId(projectPath);
    expect(shortId).toBe('my-project');
  });

  it('slugifies the project name', async () => {
    const registry = new ProjectRegistry(registryPath);
    await registry.initialize();
    const projectPath = path.join(tempDir, 'My Project! @2025');
    const shortId = await registry.getShortId(projectPath);
    expect(shortId).toBe('my-project-2025');
  });

  it('handles collisions with unique suffixes', async () => {
    const registry = new ProjectRegistry(registryPath);
    await registry.initialize();
    // Three distinct projects share the basename 'gemini'; suffixes disambiguate.
    const id1 = await registry.getShortId(path.join(tempDir, 'one', 'gemini'));
    const id2 = await registry.getShortId(path.join(tempDir, 'two', 'gemini'));
    const id3 = await registry.getShortId(
      path.join(tempDir, 'three', 'gemini'),
    );
    expect(id1).toBe('gemini');
    expect(id2).toBe('gemini-1');
    expect(id3).toBe('gemini-2');
  });

  it('persists and reloads the registry', async () => {
    const projectPath = path.join(tempDir, 'project-a');
    const registry1 = new ProjectRegistry(registryPath);
    await registry1.initialize();
    await registry1.getShortId(projectPath);
    // A fresh instance reading the same file must return the same ID.
    const registry2 = new ProjectRegistry(registryPath);
    await registry2.initialize();
    const id = await registry2.getShortId(projectPath);
    expect(id).toBe('project-a');
    const data = JSON.parse(fs.readFileSync(registryPath, 'utf8'));
    // Use the actual normalized path as key
    const normalizedPath = normalizePath(projectPath);
    expect(data.projects[normalizedPath]).toBe('project-a');
  });

  it('normalizes paths', async () => {
    const registry = new ProjectRegistry(registryPath);
    await registry.initialize();
    // Same directory spelled two ways: 'project' vs 'project/../project'.
    const path1 = path.join(tempDir, 'project');
    const path2 = path.join(path1, '..', 'project');
    const id1 = await registry.getShortId(path1);
    const id2 = await registry.getShortId(path2);
    expect(id1).toBe(id2);
  });

  it('creates ownership markers in base directories', async () => {
    const registry = new ProjectRegistry(registryPath, [baseDir1, baseDir2]);
    await registry.initialize();
    const projectPath = normalizePath(path.join(tempDir, 'project-x'));
    const shortId = await registry.getShortId(projectPath);
    expect(shortId).toBe('project-x');
    // Each base dir gets <slug>/.project_root containing the owner's path.
    const marker1 = path.join(baseDir1, shortId, '.project_root');
    const marker2 = path.join(baseDir2, shortId, '.project_root');
    expect(normalizePath(fs.readFileSync(marker1, 'utf8'))).toBe(projectPath);
    expect(normalizePath(fs.readFileSync(marker2, 'utf8'))).toBe(projectPath);
  });

  it('recovers mapping from disk if registry is missing it', async () => {
    // 1. Setup a project with ownership markers
    const projectPath = normalizePath(path.join(tempDir, 'project-x'));
    const slug = 'project-x';
    const slugDir = path.join(baseDir1, slug);
    fs.mkdirSync(slugDir, { recursive: true });
    fs.writeFileSync(path.join(slugDir, '.project_root'), projectPath);
    // 2. Initialize registry (it has no projects.json)
    const registry = new ProjectRegistry(registryPath, [baseDir1, baseDir2]);
    await registry.initialize();
    // 3. getShortId should find it from disk
    const shortId = await registry.getShortId(projectPath);
    expect(shortId).toBe(slug);
    // 4. It should have populated the markers in other base dirs too
    const marker2 = path.join(baseDir2, slug, '.project_root');
    expect(normalizePath(fs.readFileSync(marker2, 'utf8'))).toBe(projectPath);
  });

  it('handles collisions if a slug is taken on disk by another project', async () => {
    // 1. project-y takes 'gemini' on disk
    const projectY = normalizePath(path.join(tempDir, 'project-y'));
    const slug = 'gemini';
    const slugDir = path.join(baseDir1, slug);
    fs.mkdirSync(slugDir, { recursive: true });
    fs.writeFileSync(path.join(slugDir, '.project_root'), projectY);
    // 2. project-z tries to get shortId for 'gemini'
    const registry = new ProjectRegistry(registryPath, [baseDir1]);
    await registry.initialize();
    const projectZ = normalizePath(path.join(tempDir, 'gemini'));
    const shortId = await registry.getShortId(projectZ);
    // 3. It should avoid 'gemini' and pick 'gemini-1' (or similar)
    expect(shortId).not.toBe('gemini');
    expect(shortId).toBe('gemini-1');
  });

  it('invalidates registry mapping if disk ownership changed', async () => {
    // 1. Registry thinks my-project owns 'my-project'
    const projectPath = normalizePath(path.join(tempDir, 'my-project'));
    fs.writeFileSync(
      registryPath,
      JSON.stringify({
        projects: {
          [projectPath]: 'my-project',
        },
      }),
    );
    // 2. But disk says project-b owns 'my-project'
    const slugDir = path.join(baseDir1, 'my-project');
    fs.mkdirSync(slugDir, { recursive: true });
    fs.writeFileSync(
      path.join(slugDir, '.project_root'),
      normalizePath(path.join(tempDir, 'project-b')),
    );
    // 3. my-project asks for its ID
    const registry = new ProjectRegistry(registryPath, [baseDir1]);
    await registry.initialize();
    const id = await registry.getShortId(projectPath);
    // 4. It should NOT get 'my-project' because it's owned by project-b on disk.
    // It should get 'my-project-1' instead.
    expect(id).not.toBe('my-project');
    expect(id).toBe('my-project-1');
  });

  it('repairs missing ownership markers in other base directories', async () => {
    const projectPath = normalizePath(path.join(tempDir, 'project-repair'));
    const slug = 'repair-me';
    // 1. Marker exists in base1 but NOT in base2
    const slugDir1 = path.join(baseDir1, slug);
    fs.mkdirSync(slugDir1, { recursive: true });
    fs.writeFileSync(path.join(slugDir1, '.project_root'), projectPath);
    const registry = new ProjectRegistry(registryPath, [baseDir1, baseDir2]);
    await registry.initialize();
    // 2. getShortId should find it and repair base2
    const shortId = await registry.getShortId(projectPath);
    expect(shortId).toBe(slug);
    const marker2 = path.join(baseDir2, slug, '.project_root');
    expect(fs.existsSync(marker2)).toBe(true);
    expect(normalizePath(fs.readFileSync(marker2, 'utf8'))).toBe(projectPath);
  });

  it('heals if both markers are missing but registry mapping exists', async () => {
    const projectPath = normalizePath(path.join(tempDir, 'project-heal-both'));
    const slug = 'heal-both';
    // 1. Registry has the mapping
    fs.writeFileSync(
      registryPath,
      JSON.stringify({
        projects: {
          [projectPath]: slug,
        },
      }),
    );
    // 2. No markers on disk
    const registry = new ProjectRegistry(registryPath, [baseDir1, baseDir2]);
    await registry.initialize();
    // 3. getShortId should recreate them
    const id = await registry.getShortId(projectPath);
    expect(id).toBe(slug);
    expect(fs.existsSync(path.join(baseDir1, slug, '.project_root'))).toBe(
      true,
    );
    expect(fs.existsSync(path.join(baseDir2, slug, '.project_root'))).toBe(
      true,
    );
    expect(
      normalizePath(
        fs.readFileSync(path.join(baseDir1, slug, '.project_root'), 'utf8'),
      ),
    ).toBe(projectPath);
  });

  it('handles corrupted (unreadable) ownership markers by picking a new slug', async () => {
    const projectPath = normalizePath(path.join(tempDir, 'corrupt-slug'));
    const slug = 'corrupt-slug';
    // 1. Marker exists but is owned by someone else
    const slugDir = path.join(baseDir1, slug);
    fs.mkdirSync(slugDir, { recursive: true });
    fs.writeFileSync(
      path.join(slugDir, '.project_root'),
      normalizePath(path.join(tempDir, 'something-else')),
    );
    // 2. Registry also thinks we own it
    fs.writeFileSync(
      registryPath,
      JSON.stringify({
        projects: {
          [projectPath]: slug,
        },
      }),
    );
    const registry = new ProjectRegistry(registryPath, [baseDir1]);
    await registry.initialize();
    // 3. It should see the collision/corruption and pick a new one
    const id = await registry.getShortId(projectPath);
    expect(id).toBe(`${slug}-1`);
  });

  it('throws on lock timeout', async () => {
    const registry = new ProjectRegistry(registryPath);
    await registry.initialize();
    // Simulate proper-lockfile failing to acquire the registry lock.
    vi.mocked(lock).mockRejectedValue(new Error('Lock timeout'));
    await expect(registry.getShortId('/foo')).rejects.toThrow('Lock timeout');
    expect(lock).toHaveBeenCalledWith(
      registryPath,
      expect.objectContaining({
        retries: expect.any(Object),
      }),
    );
  });

  it('throws if not initialized', async () => {
    const registry = new ProjectRegistry(registryPath);
    await expect(registry.getShortId('/foo')).rejects.toThrow(
      'ProjectRegistry must be initialized before use',
    );
  });
});
+320
View File
@@ -0,0 +1,320 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import * as fs from 'node:fs';
import * as path from 'node:path';
import * as os from 'node:os';
import { lock } from 'proper-lockfile';
import { debugLogger } from '../utils/debugLogger.js';
/** Shape of the on-disk registry file (projects.json). */
export interface RegistryData {
  // Maps a normalized absolute project path to its short slug identifier.
  projects: Record<string, string>;
}
// Marker file inside each <baseDir>/<slug>/ recording the owning project path.
const PROJECT_ROOT_FILE = '.project_root';
// Total time budget for acquiring the registry file lock.
const LOCK_TIMEOUT_MS = 10000;
// Delay between lock acquisition retries.
const LOCK_RETRY_DELAY_MS = 100;
/**
 * Manages a mapping between absolute project paths and short, human-readable identifiers.
 * This helps reduce context bloat and makes temporary directories easier to work with.
 *
 * Ownership of a slug is additionally recorded on disk via a `.project_root`
 * marker file inside `<baseDir>/<slug>/`, so mappings can be recovered,
 * validated, or repaired even when the registry JSON is missing or stale.
 */
export class ProjectRegistry {
  private readonly registryPath: string;
  private readonly baseDirs: string[];
  private data: RegistryData | undefined;
  private initPromise: Promise<void> | undefined;

  /**
   * @param registryPath Absolute path to the projects.json registry file.
   * @param baseDirs Directories (e.g. tmp/history roots) in which per-slug
   *   ownership markers are maintained.
   */
  constructor(registryPath: string, baseDirs: string[] = []) {
    this.registryPath = registryPath;
    this.baseDirs = baseDirs;
  }

  /**
   * Initializes the registry by loading data from disk.
   * Idempotent: repeated or concurrent calls share a single load.
   */
  async initialize(): Promise<void> {
    if (this.initPromise) {
      return this.initPromise;
    }
    this.initPromise = (async () => {
      if (this.data) {
        return;
      }
      this.data = await this.loadData();
    })();
    return this.initPromise;
  }

  /**
   * Reads and parses the registry file, falling back to an empty registry on
   * any failure so a bad file never blocks the CLI.
   */
  private async loadData(): Promise<RegistryData> {
    if (!fs.existsSync(this.registryPath)) {
      return { projects: {} };
    }
    try {
      const content = await fs.promises.readFile(this.registryPath, 'utf8');
      const parsed: unknown = JSON.parse(content);
      // Validate the shape: JSON.parse happily returns `null`, arrays, or
      // objects without `projects`, which would otherwise crash later
      // property accesses in getShortId().
      if (
        typeof parsed === 'object' &&
        parsed !== null &&
        !Array.isArray(parsed) &&
        typeof (parsed as RegistryData).projects === 'object' &&
        (parsed as RegistryData).projects !== null
      ) {
        return parsed as RegistryData;
      }
      debugLogger.debug('Registry file has an unexpected shape; starting fresh');
      return { projects: {} };
    } catch (e) {
      debugLogger.debug('Failed to load registry: ', e);
      // If the registry is corrupted, we'll start fresh to avoid blocking the CLI
      return { projects: {} };
    }
  }

  /** Normalizes a path: absolute, and lowercased on case-insensitive Windows. */
  private normalizePath(projectPath: string): string {
    let resolved = path.resolve(projectPath);
    if (os.platform() === 'win32') {
      resolved = resolved.toLowerCase();
    }
    return resolved;
  }

  /**
   * Persists the registry atomically (write-to-temp then rename).
   * Failures are logged but not thrown — saving is best-effort.
   */
  private async save(data: RegistryData): Promise<void> {
    const dir = path.dirname(this.registryPath);
    if (!fs.existsSync(dir)) {
      await fs.promises.mkdir(dir, { recursive: true });
    }
    try {
      const content = JSON.stringify(data, null, 2);
      const tmpPath = `${this.registryPath}.tmp`;
      await fs.promises.writeFile(tmpPath, content, 'utf8');
      await fs.promises.rename(tmpPath, this.registryPath);
    } catch (error) {
      debugLogger.error(
        `Failed to save project registry to ${this.registryPath}:`,
        error,
      );
    }
  }

  /**
   * Returns a short identifier for the given project path.
   * If the project is not already in the registry, a new identifier is generated and saved.
   *
   * @throws If initialize() has not completed, or the registry lock cannot be acquired.
   */
  async getShortId(projectPath: string): Promise<string> {
    if (!this.data) {
      throw new Error('ProjectRegistry must be initialized before use');
    }
    const normalizedPath = this.normalizePath(projectPath);
    // Ensure directory exists so we can create a lock file
    const dir = path.dirname(this.registryPath);
    if (!fs.existsSync(dir)) {
      await fs.promises.mkdir(dir, { recursive: true });
    }
    // Ensure the registry file exists so proper-lockfile can lock it
    if (!fs.existsSync(this.registryPath)) {
      await this.save({ projects: {} });
    }
    // Use proper-lockfile to prevent racy updates
    const release = await lock(this.registryPath, {
      retries: {
        retries: Math.floor(LOCK_TIMEOUT_MS / LOCK_RETRY_DELAY_MS),
        minTimeout: LOCK_RETRY_DELAY_MS,
      },
    });
    try {
      // Re-load data under lock to get the latest state
      const currentData = await this.loadData();
      this.data = currentData;
      let shortId: string | undefined = currentData.projects[normalizedPath];
      // If we have a mapping, verify it against the folders on disk
      if (shortId) {
        if (await this.verifySlugOwnership(shortId, normalizedPath)) {
          // HEAL: If it passed verification but markers are missing (e.g. new base dir or deleted marker), recreate them.
          await this.ensureOwnershipMarkers(shortId, normalizedPath);
          return shortId;
        }
        // If verification fails, it means the registry is out of sync or someone else took it.
        // We'll remove the mapping and find/generate a new one.
        delete currentData.projects[normalizedPath];
      }
      // Try to find if this project already has folders assigned that we didn't know about
      shortId = await this.findExistingSlugForPath(normalizedPath);
      if (!shortId) {
        // Generate a new one
        shortId = await this.claimNewSlug(normalizedPath, currentData.projects);
      }
      currentData.projects[normalizedPath] = shortId;
      await this.save(currentData);
      return shortId;
    } finally {
      await release();
    }
  }

  /**
   * Returns true when no on-disk marker contradicts `projectPath` owning `slug`.
   * A missing marker is treated as "no objection" (healed later).
   */
  private async verifySlugOwnership(
    slug: string,
    projectPath: string,
  ): Promise<boolean> {
    if (this.baseDirs.length === 0) {
      return true; // Nothing to verify against
    }
    for (const baseDir of this.baseDirs) {
      const markerPath = path.join(baseDir, slug, PROJECT_ROOT_FILE);
      if (fs.existsSync(markerPath)) {
        try {
          const owner = (await fs.promises.readFile(markerPath, 'utf8')).trim();
          if (this.normalizePath(owner) !== this.normalizePath(projectPath)) {
            return false;
          }
        } catch (e) {
          debugLogger.debug(
            `Failed to read ownership marker ${markerPath}:`,
            e,
          );
          // If we can't read it, assume it's not ours or corrupted.
          return false;
        }
      }
    }
    return true;
  }

  /**
   * Scans all base dirs for an existing slug whose marker names this project,
   * repairing markers in the other base dirs when one is found.
   */
  private async findExistingSlugForPath(
    projectPath: string,
  ): Promise<string | undefined> {
    if (this.baseDirs.length === 0) {
      return undefined;
    }
    const normalizedTarget = this.normalizePath(projectPath);
    // Scan all base dirs to see if any slug already belongs to this project
    for (const baseDir of this.baseDirs) {
      if (!fs.existsSync(baseDir)) {
        continue;
      }
      try {
        const candidates = await fs.promises.readdir(baseDir);
        for (const candidate of candidates) {
          const markerPath = path.join(baseDir, candidate, PROJECT_ROOT_FILE);
          if (fs.existsSync(markerPath)) {
            const owner = (
              await fs.promises.readFile(markerPath, 'utf8')
            ).trim();
            if (this.normalizePath(owner) === normalizedTarget) {
              // Found it! Ensure all base dirs have the marker
              await this.ensureOwnershipMarkers(candidate, normalizedTarget);
              return candidate;
            }
          }
        }
      } catch (e) {
        debugLogger.debug(`Failed to scan base dir ${baseDir}:`, e);
      }
    }
    return undefined;
  }

  /**
   * Finds the first `slug`, `slug-1`, `slug-2`, ... free in both the registry
   * and on disk, then claims it by writing ownership markers.
   */
  private async claimNewSlug(
    projectPath: string,
    existingMappings: Record<string, string>,
  ): Promise<string> {
    const baseName = path.basename(projectPath) || 'project';
    const slug = this.slugify(baseName);
    let counter = 0;
    const existingIds = new Set(Object.values(existingMappings));
    while (true) {
      const candidate = counter === 0 ? slug : `${slug}-${counter}`;
      counter++;
      // Check if taken in registry
      if (existingIds.has(candidate)) {
        continue;
      }
      // Check if taken on disk
      let diskCollision = false;
      for (const baseDir of this.baseDirs) {
        const markerPath = path.join(baseDir, candidate, PROJECT_ROOT_FILE);
        if (fs.existsSync(markerPath)) {
          try {
            const owner = (
              await fs.promises.readFile(markerPath, 'utf8')
            ).trim();
            if (this.normalizePath(owner) !== this.normalizePath(projectPath)) {
              diskCollision = true;
              break;
            }
          } catch (_e) {
            // If we can't read it, assume it's someone else's to be safe
            diskCollision = true;
            break;
          }
        }
      }
      if (diskCollision) {
        continue;
      }
      // Try to claim it
      try {
        await this.ensureOwnershipMarkers(candidate, projectPath);
        return candidate;
      } catch (_e) {
        // Someone might have claimed it between our check and our write.
        // Try next candidate.
        continue;
      }
    }
  }

  /**
   * Writes a `.project_root` marker for `slug` in every base dir.
   * @throws If a marker already exists and names a different project.
   */
  private async ensureOwnershipMarkers(
    slug: string,
    projectPath: string,
  ): Promise<void> {
    const normalizedProject = this.normalizePath(projectPath);
    for (const baseDir of this.baseDirs) {
      const slugDir = path.join(baseDir, slug);
      if (!fs.existsSync(slugDir)) {
        await fs.promises.mkdir(slugDir, { recursive: true });
      }
      const markerPath = path.join(slugDir, PROJECT_ROOT_FILE);
      if (fs.existsSync(markerPath)) {
        const owner = (await fs.promises.readFile(markerPath, 'utf8')).trim();
        if (this.normalizePath(owner) === normalizedProject) {
          continue;
        }
        // Collision!
        throw new Error(`Slug ${slug} is already owned by ${owner}`);
      }
      // Use flag: 'wx' to ensure atomic creation
      await fs.promises.writeFile(markerPath, normalizedProject, {
        encoding: 'utf8',
        flag: 'wx',
      });
    }
  }

  /** Lowercases and collapses non-alphanumerics to single hyphens. */
  private slugify(text: string): string {
    return (
      text
        .toLowerCase()
        .replace(/[^a-z0-9]/g, '-')
        .replace(/-+/g, '-')
        .replace(/^-|-$/g, '') || 'project'
    );
  }
}
+54 -2
View File
@@ -4,7 +4,12 @@
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect, vi, afterEach } from 'vitest';
import { beforeEach, describe, it, expect, vi, afterEach } from 'vitest';
vi.unmock('./storage.js');
vi.unmock('./projectRegistry.js');
vi.unmock('./storageMigration.js');
import * as os from 'node:os';
import * as path from 'node:path';
@@ -18,6 +23,52 @@ vi.mock('fs', async (importOriginal) => {
import { Storage } from './storage.js';
import { GEMINI_DIR, homedir } from '../utils/paths.js';
import { ProjectRegistry } from './projectRegistry.js';
import { StorageMigration } from './storageMigration.js';
const PROJECT_SLUG = 'project-slug';
vi.mock('./projectRegistry.js');
vi.mock('./storageMigration.js');
// Unit tests for Storage.initialize(); ProjectRegistry and StorageMigration
// are class-mocked above, so no real registry/migration I/O happens here.
describe('Storage initialize', () => {
  const projectRoot = '/tmp/project';
  let storage: Storage;

  beforeEach(() => {
    // Stub the (mocked) registry so initialize() resolves and yields a
    // deterministic slug for the project.
    ProjectRegistry.prototype.initialize = vi.fn().mockResolvedValue(undefined);
    ProjectRegistry.prototype.getShortId = vi
      .fn()
      .mockReturnValue(PROJECT_SLUG);
    storage = new Storage(projectRoot);
    // NOTE(review): clearAllMocks resets recorded calls but keeps the
    // implementations set above — confirm this ordering is intentional.
    vi.clearAllMocks();
    // Mock StorageMigration.migrateDirectory
    vi.mocked(StorageMigration.migrateDirectory).mockResolvedValue(undefined);
  });

  it('sets up the registry and performs migration if `getProjectTempDir` is called', async () => {
    await storage.initialize();
    // Temp dir is now keyed by the slug rather than a path hash.
    expect(storage.getProjectTempDir()).toBe(
      path.join(os.homedir(), GEMINI_DIR, 'tmp', PROJECT_SLUG),
    );
    // Verify registry initialization
    expect(ProjectRegistry).toHaveBeenCalled();
    expect(vi.mocked(ProjectRegistry).prototype.initialize).toHaveBeenCalled();
    expect(
      vi.mocked(ProjectRegistry).prototype.getShortId,
    ).toHaveBeenCalledWith(projectRoot);
    // Verify migration calls
    const shortId = 'project-slug';
    // We can't easily get the hash here without repeating logic, but we can verify it's called twice
    expect(StorageMigration.migrateDirectory).toHaveBeenCalledTimes(2);
    // Verify identifier is set by checking a path
    expect(storage.getProjectTempDir()).toContain(shortId);
  });
});
vi.mock('../utils/paths.js', async (importOriginal) => {
const actual = await importOriginal<typeof import('../utils/paths.js')>();
@@ -103,7 +154,8 @@ describe('Storage additional helpers', () => {
expect(Storage.getGlobalBinDir()).toBe(expected);
});
it('getProjectTempPlansDir returns ~/.gemini/tmp/<hash>/plans', () => {
it('getProjectTempPlansDir returns ~/.gemini/tmp/<identifier>/plans', async () => {
await storage.initialize();
const tempDir = storage.getProjectTempDir();
const expected = path.join(tempDir, 'plans');
expect(storage.getProjectTempPlansDir()).toBe(expected);
+66 -5
View File
@@ -9,6 +9,8 @@ import * as os from 'node:os';
import * as crypto from 'node:crypto';
import * as fs from 'node:fs';
import { GEMINI_DIR, homedir } from '../utils/paths.js';
import { ProjectRegistry } from './projectRegistry.js';
import { StorageMigration } from './storageMigration.js';
export const GOOGLE_ACCOUNTS_FILENAME = 'google_accounts.json';
export const OAUTH_FILE = 'oauth_creds.json';
@@ -18,6 +20,8 @@ const AGENTS_DIR_NAME = '.agents';
export class Storage {
private readonly targetDir: string;
private projectIdentifier: string | undefined;
private initPromise: Promise<void> | undefined;
constructor(targetDir: string) {
this.targetDir = targetDir;
@@ -125,9 +129,9 @@ export class Storage {
}
getProjectTempDir(): string {
const hash = this.getFilePathHash(this.getProjectRoot());
const identifier = this.getProjectIdentifier();
const tempDir = Storage.getGlobalTempDir();
return path.join(tempDir, hash);
return path.join(tempDir, identifier);
}
ensureProjectTempDirExists(): void {
@@ -146,10 +150,67 @@ export class Storage {
return crypto.createHash('sha256').update(filePath).digest('hex');
}
getHistoryDir(): string {
const hash = this.getFilePathHash(this.getProjectRoot());
private getProjectIdentifier(): string {
if (!this.projectIdentifier) {
throw new Error('Storage must be initialized before use');
}
return this.projectIdentifier;
}
/**
* Initializes storage by setting up the project registry and performing migrations.
*/
async initialize(): Promise<void> {
if (this.initPromise) {
return this.initPromise;
}
this.initPromise = (async () => {
if (this.projectIdentifier) {
return;
}
const registryPath = path.join(
Storage.getGlobalGeminiDir(),
'projects.json',
);
const registry = new ProjectRegistry(registryPath, [
Storage.getGlobalTempDir(),
path.join(Storage.getGlobalGeminiDir(), 'history'),
]);
await registry.initialize();
this.projectIdentifier = await registry.getShortId(this.getProjectRoot());
await this.performMigration();
})();
return this.initPromise;
}
/**
* Performs migration of legacy hash-based directories to the new slug-based format.
* This is called internally by initialize().
*/
private async performMigration(): Promise<void> {
const shortId = this.getProjectIdentifier();
const oldHash = this.getFilePathHash(this.getProjectRoot());
// Migrate Temp Dir
const newTempDir = path.join(Storage.getGlobalTempDir(), shortId);
const oldTempDir = path.join(Storage.getGlobalTempDir(), oldHash);
await StorageMigration.migrateDirectory(oldTempDir, newTempDir);
// Migrate History Dir
const historyDir = path.join(Storage.getGlobalGeminiDir(), 'history');
return path.join(historyDir, hash);
const newHistoryDir = path.join(historyDir, shortId);
const oldHistoryDir = path.join(historyDir, oldHash);
await StorageMigration.migrateDirectory(oldHistoryDir, newHistoryDir);
}
getHistoryDir(): string {
const identifier = this.getProjectIdentifier();
const historyDir = path.join(Storage.getGlobalGeminiDir(), 'history');
return path.join(historyDir, identifier);
}
getWorkspaceSettingsPath(): string {
@@ -0,0 +1,77 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
vi.unmock('./storageMigration.js');
import * as fs from 'node:fs';
import * as path from 'node:path';
import * as os from 'node:os';
import { StorageMigration } from './storageMigration.js';
// Filesystem-level tests for StorageMigration; every test runs inside a
// fresh scratch directory that is removed afterwards.
describe('StorageMigration', () => {
  let scratchDir: string;

  beforeEach(() => {
    scratchDir = fs.mkdtempSync(
      path.join(os.tmpdir(), 'gemini-migration-test-'),
    );
  });

  afterEach(() => {
    fs.rmSync(scratchDir, { recursive: true, force: true });
    vi.restoreAllMocks();
  });

  it('migrates a directory from old to new path (non-destructively)', async () => {
    const legacyDir = path.join(scratchDir, 'old-hash');
    const migratedDir = path.join(scratchDir, 'new-slug');
    fs.mkdirSync(legacyDir);
    fs.writeFileSync(path.join(legacyDir, 'test.txt'), 'hello');

    await StorageMigration.migrateDirectory(legacyDir, migratedDir);

    // Contents were copied and the legacy directory was left in place.
    expect(fs.existsSync(migratedDir)).toBe(true);
    expect(fs.existsSync(legacyDir)).toBe(true);
    expect(fs.readFileSync(path.join(migratedDir, 'test.txt'), 'utf8')).toBe(
      'hello',
    );
  });

  it('does nothing if old path does not exist', async () => {
    const legacyDir = path.join(scratchDir, 'non-existent');
    const migratedDir = path.join(scratchDir, 'new-slug');

    await StorageMigration.migrateDirectory(legacyDir, migratedDir);

    expect(fs.existsSync(migratedDir)).toBe(false);
  });

  it('does nothing if new path already exists', async () => {
    const legacyDir = path.join(scratchDir, 'old-hash');
    const migratedDir = path.join(scratchDir, 'new-slug');
    fs.mkdirSync(legacyDir);
    fs.mkdirSync(migratedDir);
    fs.writeFileSync(path.join(legacyDir, 'old.txt'), 'old');
    fs.writeFileSync(path.join(migratedDir, 'new.txt'), 'new');

    await StorageMigration.migrateDirectory(legacyDir, migratedDir);

    // A pre-existing destination is treated as already migrated: nothing copied.
    expect(fs.existsSync(legacyDir)).toBe(true);
    expect(fs.existsSync(path.join(migratedDir, 'new.txt'))).toBe(true);
    expect(fs.existsSync(path.join(migratedDir, 'old.txt'))).toBe(false);
  });

  it('creates parent directory for new path if it does not exist', async () => {
    const legacyDir = path.join(scratchDir, 'old-hash');
    const migratedDir = path.join(scratchDir, 'sub', 'new-slug');
    fs.mkdirSync(legacyDir);

    await StorageMigration.migrateDirectory(legacyDir, migratedDir);

    expect(fs.existsSync(migratedDir)).toBe(true);
    expect(fs.existsSync(legacyDir)).toBe(true); // source untouched
  });
});
@@ -0,0 +1,44 @@
/**
* @license
* Copyright 2025 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
import * as fs from 'node:fs';
import * as path from 'node:path';
import { debugLogger } from '../utils/debugLogger.js';
/**
 * Migration utility to move data from old hash-based directories to new slug-based directories.
 */
export class StorageMigration {
  /**
   * Migrates a directory from an old path to a new path if the old one exists and the new one doesn't.
   *
   * The migration is non-destructive: contents are COPIED and the old
   * directory is left in place, so older CLI versions can still find it.
   * All failures are logged and swallowed — migration is best-effort and
   * must never block startup.
   *
   * @param oldPath The old directory path (hash-based).
   * @param newPath The new directory path (slug-based).
   */
  static async migrateDirectory(
    oldPath: string,
    newPath: string,
  ): Promise<void> {
    try {
      // If the new path already exists, we consider migration done or skipped to avoid overwriting.
      // If the old path doesn't exist, there's nothing to migrate.
      if (fs.existsSync(newPath) || !fs.existsSync(oldPath)) {
        return;
      }
      // Ensure the parent directory of the new path exists
      const parentDir = path.dirname(newPath);
      await fs.promises.mkdir(parentDir, { recursive: true });
      // Copy (safer and handles cross-device moves)
      await fs.promises.cp(oldPath, newPath, { recursive: true });
    } catch (e) {
      // "copy", not "move": the operation above is a non-destructive copy.
      debugLogger.debug(
        `Storage Migration: Failed to copy ${oldPath} to ${newPath}:`,
        e,
      );
    }
  }
}
+7 -5
View File
@@ -25,19 +25,21 @@ import { Storage } from '../config/storage.js';
import { promises as fs, existsSync } from 'node:fs';
import path from 'node:path';
import type { Content } from '@google/genai';
import crypto from 'node:crypto';
import os from 'node:os';
import { GEMINI_DIR } from '../utils/paths.js';
import { debugLogger } from '../utils/debugLogger.js';
const PROJECT_SLUG = 'project-slug';
const TMP_DIR_NAME = 'tmp';
const LOG_FILE_NAME = 'logs.json';
const CHECKPOINT_FILE_NAME = 'checkpoint.json';
const projectDir = process.cwd();
const hash = crypto.createHash('sha256').update(projectDir).digest('hex');
const TEST_GEMINI_DIR = path.join(os.homedir(), GEMINI_DIR, TMP_DIR_NAME, hash);
const TEST_GEMINI_DIR = path.join(
os.homedir(),
GEMINI_DIR,
TMP_DIR_NAME,
PROJECT_SLUG,
);
const TEST_LOG_FILE_PATH = path.join(TEST_GEMINI_DIR, LOG_FILE_NAME);
const TEST_CHECKPOINT_FILE_PATH = path.join(
+1
View File
@@ -141,6 +141,7 @@ export class Logger {
return;
}
await this.storage.initialize();
this.geminiDir = this.storage.getProjectTempDir();
this.logFilePath = path.join(this.geminiDir, LOG_FILE_NAME);
+2
View File
@@ -12,6 +12,8 @@ import type { PolicySettings } from './types.js';
import { ApprovalMode, PolicyDecision, InProcessCheckerType } from './types.js';
import { isDirectorySecure } from '../utils/security.js';
vi.unmock('../config/storage.js');
vi.mock('../utils/security.js', () => ({
isDirectorySecure: vi.fn().mockResolvedValue({ secure: true }),
}));
+5 -10
View File
@@ -18,13 +18,11 @@ import { Storage } from '../config/storage.js';
import * as path from 'node:path';
import * as fs from 'node:fs/promises';
import * as os from 'node:os';
import {
getProjectHash,
GEMINI_DIR,
homedir as pathsHomedir,
} from '../utils/paths.js';
import { GEMINI_DIR, homedir as pathsHomedir } from '../utils/paths.js';
import { spawnAsync } from '../utils/shell-utils.js';
const PROJECT_SLUG = 'project-slug';
vi.mock('../utils/shell-utils.js', () => ({
spawnAsync: vi.fn(),
}));
@@ -85,7 +83,6 @@ describe('GitService', () => {
let testRootDir: string;
let projectRoot: string;
let homedir: string;
let hash: string;
let storage: Storage;
beforeEach(async () => {
@@ -95,8 +92,6 @@ describe('GitService', () => {
await fs.mkdir(projectRoot, { recursive: true });
await fs.mkdir(homedir, { recursive: true });
hash = getProjectHash(projectRoot);
vi.clearAllMocks();
hoistedIsGitRepositoryMock.mockReturnValue(true);
(spawnAsync as Mock).mockResolvedValue({
@@ -181,8 +176,8 @@ describe('GitService', () => {
let repoDir: string;
let gitConfigPath: string;
beforeEach(() => {
repoDir = path.join(homedir, GEMINI_DIR, 'history', hash);
beforeEach(async () => {
repoDir = path.join(homedir, GEMINI_DIR, 'history', PROJECT_SLUG);
gitConfigPath = path.join(repoDir, '.gitconfig');
});
+1
View File
@@ -33,6 +33,7 @@ export class GitService {
'Checkpointing is enabled, but Git is not installed. Please install Git or disable checkpointing to continue.',
);
}
await this.storage.initialize();
try {
await this.setupShadowGitRepository();
} catch (error) {