mirror of
https://github.com/google-gemini/gemini-cli.git
synced 2026-04-03 01:40:59 -07:00
fix(core): refactor linux sandbox to fix ARG_MAX crashes (#24286)
This commit is contained in:
@@ -6,10 +6,8 @@
|
||||
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import { LinuxSandboxManager } from './LinuxSandboxManager.js';
|
||||
import type { SandboxRequest } from '../../services/sandboxManager.js';
|
||||
import fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import * as shellUtils from '../../utils/shell-utils.js';
|
||||
|
||||
vi.mock('node:fs', async () => {
|
||||
const actual = await vi.importActual<typeof import('node:fs')>('node:fs');
|
||||
@@ -74,111 +72,63 @@ describe('LinuxSandboxManager', () => {
|
||||
vi.restoreAllMocks();
|
||||
});
|
||||
|
||||
const getBwrapArgs = async (
|
||||
req: SandboxRequest,
|
||||
customManager?: LinuxSandboxManager,
|
||||
) => {
|
||||
const mgr = customManager || manager;
|
||||
const result = await mgr.prepareCommand(req);
|
||||
expect(result.program).toBe('sh');
|
||||
expect(result.args[0]).toBe('-c');
|
||||
expect(result.args[1]).toBe(
|
||||
'bpf_path="$1"; shift; exec bwrap "$@" 9< "$bpf_path"',
|
||||
);
|
||||
expect(result.args[2]).toBe('_');
|
||||
expect(result.args[3]).toMatch(/gemini-cli-seccomp-.*\.bpf$/);
|
||||
return result.args.slice(4);
|
||||
};
|
||||
|
||||
describe('prepareCommand', () => {
|
||||
it('should correctly format the base command and args', async () => {
|
||||
const bwrapArgs = await getBwrapArgs({
|
||||
it('wraps the command and arguments correctly using a temporary file', async () => {
|
||||
const result = await manager.prepareCommand({
|
||||
command: 'ls',
|
||||
args: ['-la'],
|
||||
cwd: workspace,
|
||||
env: {},
|
||||
env: { PATH: '/usr/bin' },
|
||||
});
|
||||
|
||||
expect(bwrapArgs).toEqual([
|
||||
'--unshare-all',
|
||||
'--new-session',
|
||||
'--die-with-parent',
|
||||
'--ro-bind',
|
||||
'/',
|
||||
'/',
|
||||
'--dev',
|
||||
'/dev',
|
||||
'--proc',
|
||||
'/proc',
|
||||
'--tmpfs',
|
||||
'/tmp',
|
||||
'--ro-bind-try',
|
||||
workspace,
|
||||
workspace,
|
||||
'--ro-bind',
|
||||
`${workspace}/.gitignore`,
|
||||
`${workspace}/.gitignore`,
|
||||
'--ro-bind',
|
||||
`${workspace}/.geminiignore`,
|
||||
`${workspace}/.geminiignore`,
|
||||
'--ro-bind',
|
||||
`${workspace}/.git`,
|
||||
`${workspace}/.git`,
|
||||
'--seccomp',
|
||||
'9',
|
||||
'--',
|
||||
'ls',
|
||||
'-la',
|
||||
]);
|
||||
});
|
||||
|
||||
it('binds workspace read-write when readonly is false', async () => {
|
||||
const customManager = new LinuxSandboxManager({
|
||||
workspace,
|
||||
modeConfig: { readonly: false },
|
||||
});
|
||||
const bwrapArgs = await getBwrapArgs(
|
||||
{
|
||||
command: 'ls',
|
||||
args: [],
|
||||
cwd: workspace,
|
||||
env: {},
|
||||
},
|
||||
customManager,
|
||||
expect(result.program).toBe('sh');
|
||||
expect(result.args[0]).toBe('-c');
|
||||
expect(result.args[1]).toContain(
|
||||
'exec bwrap --args 8 "$@" 8< "$args_path" 9< "$bpf_path"',
|
||||
);
|
||||
|
||||
expect(bwrapArgs).toContain('--bind-try');
|
||||
expect(bwrapArgs).toContain(workspace);
|
||||
expect(result.args[result.args.length - 3]).toBe('--');
|
||||
expect(result.args[result.args.length - 2]).toBe('ls');
|
||||
expect(result.args[result.args.length - 1]).toBe('-la');
|
||||
expect(result.env['PATH']).toBe('/usr/bin');
|
||||
});
|
||||
|
||||
it('maps network permissions to --share-net', async () => {
|
||||
const bwrapArgs = await getBwrapArgs({
|
||||
command: 'curl',
|
||||
it('cleans up the temporary arguments file', async () => {
|
||||
const result = await manager.prepareCommand({
|
||||
command: 'ls',
|
||||
args: [],
|
||||
cwd: workspace,
|
||||
env: {},
|
||||
policy: { additionalPermissions: { network: true } },
|
||||
});
|
||||
|
||||
expect(bwrapArgs).toContain('--share-net');
|
||||
expect(result.cleanup).toBeDefined();
|
||||
result.cleanup!();
|
||||
|
||||
expect(fs.unlinkSync).toHaveBeenCalled();
|
||||
const unlinkCall = vi.mocked(fs.unlinkSync).mock.calls[0];
|
||||
expect(unlinkCall[0]).toMatch(/gemini-cli-bwrap-args-.*\.args$/);
|
||||
});
|
||||
|
||||
it('maps explicit write permissions to --bind-try', async () => {
|
||||
const bwrapArgs = await getBwrapArgs({
|
||||
command: 'touch',
|
||||
args: [],
|
||||
it('translates virtual commands', async () => {
|
||||
const readResult = await manager.prepareCommand({
|
||||
command: '__read',
|
||||
args: [path.join(workspace, 'file.txt')],
|
||||
cwd: workspace,
|
||||
env: {},
|
||||
policy: {
|
||||
additionalPermissions: {
|
||||
fileSystem: { write: ['/home/user/workspace/out/dir'] },
|
||||
},
|
||||
},
|
||||
});
|
||||
// Length is 8: ['-c', '...', '_', bpf, args, '--', '/bin/cat', file]
|
||||
expect(readResult.args[readResult.args.length - 2]).toBe('/bin/cat');
|
||||
|
||||
const index = bwrapArgs.indexOf('--bind-try');
|
||||
expect(index).not.toBe(-1);
|
||||
expect(bwrapArgs[index + 1]).toBe('/home/user/workspace/out/dir');
|
||||
const writeResult = await manager.prepareCommand({
|
||||
command: '__write',
|
||||
args: [path.join(workspace, 'file.txt')],
|
||||
cwd: workspace,
|
||||
env: {},
|
||||
});
|
||||
// Length is 11: ['-c', '...', '_', bpf, args, '--', '/bin/sh', '-c', '...', '_', file]
|
||||
expect(writeResult.args[writeResult.args.length - 5]).toBe('/bin/sh');
|
||||
expect(writeResult.args[writeResult.args.length - 1]).toBe(
|
||||
path.join(workspace, 'file.txt'),
|
||||
);
|
||||
});
|
||||
|
||||
it('rejects overrides in plan mode', async () => {
|
||||
@@ -192,413 +142,9 @@ describe('LinuxSandboxManager', () => {
|
||||
args: [],
|
||||
cwd: workspace,
|
||||
env: {},
|
||||
policy: { additionalPermissions: { network: true } },
|
||||
policy: { networkAccess: true },
|
||||
}),
|
||||
).rejects.toThrow(
|
||||
/Cannot override readonly\/network\/filesystem restrictions in Plan mode/,
|
||||
);
|
||||
).rejects.toThrow(/Cannot override/);
|
||||
});
|
||||
|
||||
it('should correctly pass through the cwd to the resulting command', async () => {
|
||||
const req: SandboxRequest = {
|
||||
command: 'ls',
|
||||
args: [],
|
||||
cwd: '/different/cwd',
|
||||
env: {},
|
||||
};
|
||||
|
||||
const result = await manager.prepareCommand(req);
|
||||
|
||||
expect(result.cwd).toBe('/different/cwd');
|
||||
});
|
||||
|
||||
it('should apply environment sanitization via the default mechanisms', async () => {
|
||||
const req: SandboxRequest = {
|
||||
command: 'test',
|
||||
args: [],
|
||||
cwd: workspace,
|
||||
env: {
|
||||
API_KEY: 'secret',
|
||||
PATH: '/usr/bin',
|
||||
},
|
||||
policy: {
|
||||
sanitizationConfig: {
|
||||
allowedEnvironmentVariables: ['PATH'],
|
||||
blockedEnvironmentVariables: ['API_KEY'],
|
||||
enableEnvironmentVariableRedaction: true,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const result = await manager.prepareCommand(req);
|
||||
expect(result.env['PATH']).toBe('/usr/bin');
|
||||
expect(result.env['API_KEY']).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should allow network when networkAccess is true', async () => {
|
||||
const bwrapArgs = await getBwrapArgs({
|
||||
command: 'ls',
|
||||
args: ['-la'],
|
||||
cwd: workspace,
|
||||
env: {},
|
||||
policy: {
|
||||
networkAccess: true,
|
||||
},
|
||||
});
|
||||
|
||||
expect(bwrapArgs).toContain('--share-net');
|
||||
});
|
||||
|
||||
describe('governance files', () => {
|
||||
it('should ensure governance files exist', async () => {
|
||||
vi.mocked(fs.existsSync).mockReturnValue(false);
|
||||
|
||||
await getBwrapArgs({
|
||||
command: 'ls',
|
||||
args: [],
|
||||
cwd: workspace,
|
||||
env: {},
|
||||
});
|
||||
|
||||
expect(fs.mkdirSync).toHaveBeenCalled();
|
||||
expect(fs.openSync).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should protect both the symlink and the real path if they differ', async () => {
|
||||
vi.mocked(fs.realpathSync).mockImplementation((p) => {
|
||||
if (p.toString() === `${workspace}/.gitignore`)
|
||||
return '/shared/global.gitignore';
|
||||
return p.toString();
|
||||
});
|
||||
|
||||
const bwrapArgs = await getBwrapArgs({
|
||||
command: 'ls',
|
||||
args: [],
|
||||
cwd: workspace,
|
||||
env: {},
|
||||
});
|
||||
|
||||
expect(bwrapArgs).toContain('--ro-bind');
|
||||
expect(bwrapArgs).toContain(`${workspace}/.gitignore`);
|
||||
expect(bwrapArgs).toContain('/shared/global.gitignore');
|
||||
|
||||
// Check that both are bound
|
||||
const gitignoreIndex = bwrapArgs.indexOf(`${workspace}/.gitignore`);
|
||||
expect(bwrapArgs[gitignoreIndex - 1]).toBe('--ro-bind');
|
||||
expect(bwrapArgs[gitignoreIndex + 1]).toBe(`${workspace}/.gitignore`);
|
||||
|
||||
const realGitignoreIndex = bwrapArgs.indexOf(
|
||||
'/shared/global.gitignore',
|
||||
);
|
||||
expect(bwrapArgs[realGitignoreIndex - 1]).toBe('--ro-bind');
|
||||
expect(bwrapArgs[realGitignoreIndex + 1]).toBe(
|
||||
'/shared/global.gitignore',
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('allowedPaths', () => {
|
||||
it('should parameterize allowed paths and normalize them', async () => {
|
||||
const bwrapArgs = await getBwrapArgs({
|
||||
command: 'node',
|
||||
args: ['script.js'],
|
||||
cwd: workspace,
|
||||
env: {},
|
||||
policy: {
|
||||
allowedPaths: ['/tmp/cache', '/opt/tools', workspace],
|
||||
},
|
||||
});
|
||||
|
||||
expect(bwrapArgs).toContain('--bind-try');
|
||||
expect(bwrapArgs[bwrapArgs.indexOf('/tmp/cache') - 1]).toBe(
|
||||
'--bind-try',
|
||||
);
|
||||
expect(bwrapArgs[bwrapArgs.indexOf('/opt/tools') - 1]).toBe(
|
||||
'--bind-try',
|
||||
);
|
||||
});
|
||||
|
||||
it('should grant read-write access to allowedPaths inside the workspace even when readonly mode is active', async () => {
|
||||
const manager = new LinuxSandboxManager({
|
||||
workspace,
|
||||
modeConfig: { readonly: true },
|
||||
});
|
||||
const result = await manager.prepareCommand({
|
||||
command: 'ls',
|
||||
args: [],
|
||||
cwd: workspace,
|
||||
env: {},
|
||||
policy: {
|
||||
allowedPaths: [workspace + '/subdirectory'],
|
||||
},
|
||||
});
|
||||
const bwrapArgs = result.args;
|
||||
const bindIndex = bwrapArgs.indexOf(workspace + '/subdirectory');
|
||||
expect(bwrapArgs[bindIndex - 1]).toBe('--bind-try');
|
||||
});
|
||||
|
||||
it('should not bind the workspace twice even if it has a trailing slash in allowedPaths', async () => {
|
||||
const bwrapArgs = await getBwrapArgs({
|
||||
command: 'ls',
|
||||
args: ['-la'],
|
||||
cwd: workspace,
|
||||
env: {},
|
||||
policy: {
|
||||
allowedPaths: [workspace + '/'],
|
||||
},
|
||||
});
|
||||
|
||||
const binds = bwrapArgs.filter((a) => a === workspace);
|
||||
expect(binds.length).toBe(2);
|
||||
});
|
||||
|
||||
it('should bind the parent directory of a non-existent path', async () => {
|
||||
vi.mocked(fs.existsSync).mockImplementation((p) => {
|
||||
if (p === '/home/user/workspace/new-file.txt') return false;
|
||||
return true;
|
||||
});
|
||||
|
||||
const bwrapArgs = await getBwrapArgs({
|
||||
command: '__write',
|
||||
args: ['/home/user/workspace/new-file.txt'],
|
||||
cwd: workspace,
|
||||
env: {},
|
||||
policy: {
|
||||
allowedPaths: ['/home/user/workspace/new-file.txt'],
|
||||
},
|
||||
});
|
||||
|
||||
const parentDir = '/home/user/workspace';
|
||||
const bindIndex = bwrapArgs.lastIndexOf(parentDir);
|
||||
expect(bindIndex).not.toBe(-1);
|
||||
expect(bwrapArgs[bindIndex - 2]).toBe('--bind-try');
|
||||
});
|
||||
});
|
||||
|
||||
describe('virtual commands', () => {
|
||||
it('should translate __read to cat', async () => {
|
||||
const testFile = path.join(workspace, 'file.txt');
|
||||
const bwrapArgs = await getBwrapArgs({
|
||||
command: '__read',
|
||||
args: [testFile],
|
||||
cwd: workspace,
|
||||
env: {},
|
||||
});
|
||||
|
||||
// args are: [...bwrapBaseArgs, '--', '/bin/cat', '.../file.txt']
|
||||
expect(bwrapArgs[bwrapArgs.length - 2]).toBe('/bin/cat');
|
||||
expect(bwrapArgs[bwrapArgs.length - 1]).toBe(testFile);
|
||||
});
|
||||
|
||||
it('should translate __write to sh -c cat', async () => {
|
||||
const testFile = path.join(workspace, 'file.txt');
|
||||
const bwrapArgs = await getBwrapArgs({
|
||||
command: '__write',
|
||||
args: [testFile],
|
||||
cwd: workspace,
|
||||
env: {},
|
||||
});
|
||||
|
||||
// args are: [...bwrapBaseArgs, '--', '/bin/sh', '-c', 'tee -- "$@" > /dev/null', '_', '.../file.txt']
|
||||
expect(bwrapArgs[bwrapArgs.length - 5]).toBe('/bin/sh');
|
||||
expect(bwrapArgs[bwrapArgs.length - 4]).toBe('-c');
|
||||
expect(bwrapArgs[bwrapArgs.length - 3]).toBe('tee -- "$@" > /dev/null');
|
||||
expect(bwrapArgs[bwrapArgs.length - 2]).toBe('_');
|
||||
expect(bwrapArgs[bwrapArgs.length - 1]).toBe(testFile);
|
||||
});
|
||||
});
|
||||
|
||||
describe('forbiddenPaths', () => {
|
||||
it('should parameterize forbidden paths and explicitly deny them', async () => {
|
||||
vi.mocked(fs.statSync).mockImplementation((p) => {
|
||||
if (p.toString().includes('cache')) {
|
||||
return { isDirectory: () => true } as fs.Stats;
|
||||
}
|
||||
return { isDirectory: () => false } as fs.Stats;
|
||||
});
|
||||
vi.mocked(fs.realpathSync).mockImplementation((p) => p.toString());
|
||||
|
||||
const customManager = new LinuxSandboxManager({
|
||||
workspace,
|
||||
forbiddenPaths: async () => ['/tmp/cache', '/opt/secret.txt'],
|
||||
});
|
||||
|
||||
const bwrapArgs = await getBwrapArgs(
|
||||
{
|
||||
command: 'ls',
|
||||
args: ['-la'],
|
||||
cwd: workspace,
|
||||
env: {},
|
||||
},
|
||||
customManager,
|
||||
);
|
||||
|
||||
const cacheIndex = bwrapArgs.indexOf('/tmp/cache');
|
||||
expect(bwrapArgs[cacheIndex - 1]).toBe('--tmpfs');
|
||||
|
||||
const secretIndex = bwrapArgs.indexOf('/opt/secret.txt');
|
||||
expect(bwrapArgs[secretIndex - 2]).toBe('--ro-bind');
|
||||
expect(bwrapArgs[secretIndex - 1]).toBe('/dev/null');
|
||||
});
|
||||
|
||||
it('resolves forbidden symlink paths to their real paths', async () => {
|
||||
vi.mocked(fs.statSync).mockImplementation(
|
||||
() => ({ isDirectory: () => false }) as fs.Stats,
|
||||
);
|
||||
vi.mocked(fs.realpathSync).mockImplementation((p) => {
|
||||
if (p === '/tmp/forbidden-symlink') return '/opt/real-target.txt';
|
||||
return p.toString();
|
||||
});
|
||||
|
||||
const customManager = new LinuxSandboxManager({
|
||||
workspace,
|
||||
forbiddenPaths: async () => ['/tmp/forbidden-symlink'],
|
||||
});
|
||||
|
||||
const bwrapArgs = await getBwrapArgs(
|
||||
{
|
||||
command: 'ls',
|
||||
args: ['-la'],
|
||||
cwd: workspace,
|
||||
env: {},
|
||||
},
|
||||
customManager,
|
||||
);
|
||||
|
||||
const secretIndex = bwrapArgs.indexOf('/opt/real-target.txt');
|
||||
expect(bwrapArgs[secretIndex - 2]).toBe('--ro-bind');
|
||||
expect(bwrapArgs[secretIndex - 1]).toBe('/dev/null');
|
||||
});
|
||||
|
||||
it('explicitly denies non-existent forbidden paths to prevent creation', async () => {
|
||||
const error = new Error('File not found') as NodeJS.ErrnoException;
|
||||
error.code = 'ENOENT';
|
||||
vi.mocked(fs.statSync).mockImplementation(() => {
|
||||
throw error;
|
||||
});
|
||||
vi.mocked(fs.realpathSync).mockImplementation((p) => p.toString());
|
||||
|
||||
const customManager = new LinuxSandboxManager({
|
||||
workspace,
|
||||
forbiddenPaths: async () => ['/tmp/not-here.txt'],
|
||||
});
|
||||
|
||||
const bwrapArgs = await getBwrapArgs(
|
||||
{
|
||||
command: 'ls',
|
||||
args: [],
|
||||
cwd: workspace,
|
||||
env: {},
|
||||
},
|
||||
customManager,
|
||||
);
|
||||
|
||||
const idx = bwrapArgs.indexOf('/tmp/not-here.txt');
|
||||
expect(bwrapArgs[idx - 2]).toBe('--symlink');
|
||||
expect(bwrapArgs[idx - 1]).toBe('/dev/null');
|
||||
});
|
||||
|
||||
it('masks directory symlinks with tmpfs for both paths', async () => {
|
||||
vi.mocked(fs.statSync).mockImplementation(
|
||||
() => ({ isDirectory: () => true }) as fs.Stats,
|
||||
);
|
||||
vi.mocked(fs.realpathSync).mockImplementation((p) => {
|
||||
if (p === '/tmp/dir-link') return '/opt/real-dir';
|
||||
return p.toString();
|
||||
});
|
||||
|
||||
const customManager = new LinuxSandboxManager({
|
||||
workspace,
|
||||
forbiddenPaths: async () => ['/tmp/dir-link'],
|
||||
});
|
||||
|
||||
const bwrapArgs = await getBwrapArgs(
|
||||
{
|
||||
command: 'ls',
|
||||
args: [],
|
||||
cwd: workspace,
|
||||
env: {},
|
||||
},
|
||||
customManager,
|
||||
);
|
||||
|
||||
const idx = bwrapArgs.indexOf('/opt/real-dir');
|
||||
expect(bwrapArgs[idx - 1]).toBe('--tmpfs');
|
||||
});
|
||||
|
||||
it('should override allowed paths if a path is also in forbidden paths', async () => {
|
||||
vi.mocked(fs.statSync).mockImplementation(
|
||||
() => ({ isDirectory: () => true }) as fs.Stats,
|
||||
);
|
||||
vi.mocked(fs.realpathSync).mockImplementation((p) => p.toString());
|
||||
|
||||
const customManager = new LinuxSandboxManager({
|
||||
workspace,
|
||||
forbiddenPaths: async () => ['/tmp/conflict'],
|
||||
});
|
||||
|
||||
const bwrapArgs = await getBwrapArgs(
|
||||
{
|
||||
command: 'ls',
|
||||
args: ['-la'],
|
||||
cwd: workspace,
|
||||
env: {},
|
||||
policy: {
|
||||
allowedPaths: ['/tmp/conflict'],
|
||||
},
|
||||
},
|
||||
customManager,
|
||||
);
|
||||
|
||||
// Conflict should have been filtered out of allow list (--bind-try)
|
||||
expect(bwrapArgs).not.toContain('--bind-try');
|
||||
expect(bwrapArgs).not.toContain('--bind-try-ro');
|
||||
|
||||
// It should only appear as a forbidden path (via --tmpfs)
|
||||
const conflictIdx = bwrapArgs.indexOf('/tmp/conflict');
|
||||
expect(conflictIdx).toBeGreaterThan(0);
|
||||
expect(bwrapArgs[conflictIdx - 1]).toBe('--tmpfs');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('blocks .env and .env.* files in the workspace root', async () => {
|
||||
vi.mocked(shellUtils.spawnAsync).mockImplementation((cmd, args) => {
|
||||
if (cmd === 'find' && args?.[0] === workspace) {
|
||||
// Assert that find is NOT excluding dotfiles
|
||||
expect(args).not.toContain('-not');
|
||||
expect(args).toContain('-prune');
|
||||
|
||||
return Promise.resolve({
|
||||
status: 0,
|
||||
stdout: Buffer.from(
|
||||
`${workspace}/.env\0${workspace}/.env.local\0${workspace}/.env.test\0`,
|
||||
),
|
||||
} as unknown as ReturnType<typeof shellUtils.spawnAsync>);
|
||||
}
|
||||
return Promise.resolve({
|
||||
status: 0,
|
||||
stdout: Buffer.from(''),
|
||||
} as unknown as ReturnType<typeof shellUtils.spawnAsync>);
|
||||
});
|
||||
|
||||
const bwrapArgs = await getBwrapArgs({
|
||||
command: 'ls',
|
||||
args: [],
|
||||
cwd: workspace,
|
||||
env: {},
|
||||
});
|
||||
|
||||
const bindsIndex = bwrapArgs.indexOf('--seccomp');
|
||||
const binds = bwrapArgs.slice(0, bindsIndex);
|
||||
|
||||
expect(binds).toContain(`${workspace}/.env`);
|
||||
expect(binds).toContain(`${workspace}/.env.local`);
|
||||
expect(binds).toContain(`${workspace}/.env.test`);
|
||||
|
||||
// Verify they are bound to a mask file
|
||||
const envIndex = binds.indexOf(`${workspace}/.env`);
|
||||
expect(binds[envIndex - 2]).toBe('--bind');
|
||||
expect(binds[envIndex - 1]).toMatch(/gemini-cli-mask-file-.*mocked\/mask/);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
*/
|
||||
|
||||
import fs from 'node:fs';
|
||||
import { join, dirname, normalize } from 'node:path';
|
||||
import { join, dirname } from 'node:path';
|
||||
import os from 'node:os';
|
||||
import {
|
||||
type SandboxManager,
|
||||
@@ -14,8 +14,6 @@ import {
|
||||
type SandboxedCommand,
|
||||
type SandboxPermissions,
|
||||
GOVERNANCE_FILES,
|
||||
getSecretFileFindArgs,
|
||||
sanitizePaths,
|
||||
type ParsedSandboxDenial,
|
||||
resolveSandboxPaths,
|
||||
} from '../../services/sandboxManager.js';
|
||||
@@ -24,24 +22,18 @@ import {
|
||||
sanitizeEnvironment,
|
||||
getSecureSanitizationConfig,
|
||||
} from '../../services/environmentSanitization.js';
|
||||
import { debugLogger } from '../../utils/debugLogger.js';
|
||||
import { spawnAsync } from '../../utils/shell-utils.js';
|
||||
import {
|
||||
isStrictlyApproved,
|
||||
verifySandboxOverrides,
|
||||
getCommandName,
|
||||
} from '../utils/commandUtils.js';
|
||||
import {
|
||||
tryRealpath,
|
||||
resolveGitWorktreePaths,
|
||||
isErrnoException,
|
||||
} from '../utils/fsUtils.js';
|
||||
import {
|
||||
isKnownSafeCommand,
|
||||
isDangerousCommand,
|
||||
} from '../utils/commandSafety.js';
|
||||
import { parsePosixSandboxDenials } from '../utils/sandboxDenialUtils.js';
|
||||
import { handleReadWriteCommands } from '../utils/sandboxReadWriteUtils.js';
|
||||
import { buildBwrapArgs } from './bwrapArgsBuilder.js';
|
||||
|
||||
let cachedBpfPath: string | undefined;
|
||||
|
||||
@@ -240,175 +232,40 @@ export class LinuxSandboxManager implements SandboxManager {
|
||||
|
||||
const sanitizedEnv = sanitizeEnvironment(req.env, sanitizationConfig);
|
||||
|
||||
const bwrapArgs: string[] = [
|
||||
'--unshare-all',
|
||||
'--new-session', // Isolate session
|
||||
'--die-with-parent', // Prevent orphaned runaway processes
|
||||
];
|
||||
|
||||
if (mergedAdditional.network) {
|
||||
bwrapArgs.push('--share-net');
|
||||
}
|
||||
|
||||
bwrapArgs.push(
|
||||
'--ro-bind',
|
||||
'/',
|
||||
'/',
|
||||
'--dev', // Creates a safe, minimal /dev (replaces --dev-bind)
|
||||
'/dev',
|
||||
'--proc', // Creates a fresh procfs for the unshared PID namespace
|
||||
'/proc',
|
||||
'--tmpfs', // Provides an isolated, writable /tmp directory
|
||||
'/tmp',
|
||||
);
|
||||
|
||||
const workspacePath = tryRealpath(this.options.workspace);
|
||||
|
||||
const bindFlag = workspaceWrite ? '--bind-try' : '--ro-bind-try';
|
||||
|
||||
if (workspaceWrite) {
|
||||
bwrapArgs.push(
|
||||
'--bind-try',
|
||||
this.options.workspace,
|
||||
this.options.workspace,
|
||||
);
|
||||
if (workspacePath !== this.options.workspace) {
|
||||
bwrapArgs.push('--bind-try', workspacePath, workspacePath);
|
||||
}
|
||||
} else {
|
||||
bwrapArgs.push(
|
||||
'--ro-bind-try',
|
||||
this.options.workspace,
|
||||
this.options.workspace,
|
||||
);
|
||||
if (workspacePath !== this.options.workspace) {
|
||||
bwrapArgs.push('--ro-bind-try', workspacePath, workspacePath);
|
||||
}
|
||||
}
|
||||
|
||||
const { worktreeGitDir, mainGitDir } =
|
||||
resolveGitWorktreePaths(workspacePath);
|
||||
if (worktreeGitDir) {
|
||||
bwrapArgs.push(bindFlag, worktreeGitDir, worktreeGitDir);
|
||||
}
|
||||
if (mainGitDir) {
|
||||
bwrapArgs.push(bindFlag, mainGitDir, mainGitDir);
|
||||
}
|
||||
|
||||
const includeDirs = sanitizePaths(this.options.includeDirectories);
|
||||
for (const includeDir of includeDirs) {
|
||||
try {
|
||||
const resolved = tryRealpath(includeDir);
|
||||
bwrapArgs.push('--ro-bind-try', resolved, resolved);
|
||||
} catch {
|
||||
// Ignore
|
||||
}
|
||||
}
|
||||
|
||||
const { allowed: allowedPaths, forbidden: forbiddenPaths } =
|
||||
await resolveSandboxPaths(this.options, req);
|
||||
|
||||
const normalizedWorkspace = normalize(workspacePath).replace(/\/$/, '');
|
||||
for (const allowedPath of allowedPaths) {
|
||||
const resolved = tryRealpath(allowedPath);
|
||||
if (!fs.existsSync(resolved)) {
|
||||
// If the path doesn't exist, we still want to allow access to its parent
|
||||
// if it's explicitly allowed, to enable creating it.
|
||||
try {
|
||||
const resolvedParent = tryRealpath(dirname(resolved));
|
||||
bwrapArgs.push(
|
||||
req.command === '__write' ? '--bind-try' : bindFlag,
|
||||
resolvedParent,
|
||||
resolvedParent,
|
||||
);
|
||||
} catch {
|
||||
// Ignore
|
||||
}
|
||||
continue;
|
||||
}
|
||||
const normalizedAllowedPath = normalize(resolved).replace(/\/$/, '');
|
||||
if (normalizedAllowedPath !== normalizedWorkspace) {
|
||||
bwrapArgs.push('--bind-try', resolved, resolved);
|
||||
}
|
||||
}
|
||||
|
||||
const additionalReads = sanitizePaths(mergedAdditional.fileSystem?.read);
|
||||
for (const p of additionalReads) {
|
||||
try {
|
||||
const safeResolvedPath = tryRealpath(p);
|
||||
bwrapArgs.push('--ro-bind-try', safeResolvedPath, safeResolvedPath);
|
||||
} catch (e: unknown) {
|
||||
debugLogger.warn(e instanceof Error ? e.message : String(e));
|
||||
}
|
||||
}
|
||||
|
||||
const additionalWrites = sanitizePaths(mergedAdditional.fileSystem?.write);
|
||||
for (const p of additionalWrites) {
|
||||
try {
|
||||
const safeResolvedPath = tryRealpath(p);
|
||||
bwrapArgs.push('--bind-try', safeResolvedPath, safeResolvedPath);
|
||||
} catch (e: unknown) {
|
||||
debugLogger.warn(e instanceof Error ? e.message : String(e));
|
||||
}
|
||||
}
|
||||
|
||||
for (const file of GOVERNANCE_FILES) {
|
||||
const filePath = join(this.options.workspace, file.path);
|
||||
touch(filePath, file.isDirectory);
|
||||
const realPath = tryRealpath(filePath);
|
||||
bwrapArgs.push('--ro-bind', filePath, filePath);
|
||||
if (realPath !== filePath) {
|
||||
bwrapArgs.push('--ro-bind', realPath, realPath);
|
||||
}
|
||||
}
|
||||
|
||||
for (const p of forbiddenPaths) {
|
||||
let resolved: string;
|
||||
try {
|
||||
resolved = tryRealpath(p); // Forbidden paths should still resolve to block the real path
|
||||
if (!fs.existsSync(resolved)) continue;
|
||||
} catch (e: unknown) {
|
||||
debugLogger.warn(
|
||||
`Failed to resolve forbidden path ${p}: ${e instanceof Error ? e.message : String(e)}`,
|
||||
);
|
||||
bwrapArgs.push('--ro-bind', '/dev/null', p);
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
const stat = fs.statSync(resolved);
|
||||
if (stat.isDirectory()) {
|
||||
bwrapArgs.push('--tmpfs', resolved, '--remount-ro', resolved);
|
||||
} else {
|
||||
bwrapArgs.push('--ro-bind', '/dev/null', resolved);
|
||||
}
|
||||
} catch (e: unknown) {
|
||||
if (isErrnoException(e) && e.code === 'ENOENT') {
|
||||
bwrapArgs.push('--symlink', '/dev/null', resolved);
|
||||
} else {
|
||||
debugLogger.warn(
|
||||
`Failed to stat forbidden path ${resolved}: ${e instanceof Error ? e.message : String(e)}`,
|
||||
);
|
||||
bwrapArgs.push('--ro-bind', '/dev/null', resolved);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Mask secret files (.env, .env.*)
|
||||
bwrapArgs.push(
|
||||
...(await this.getSecretFilesArgs(req.policy?.allowedPaths)),
|
||||
);
|
||||
const bwrapArgs = await buildBwrapArgs({
|
||||
workspace: this.options.workspace,
|
||||
workspaceWrite,
|
||||
networkAccess,
|
||||
allowedPaths,
|
||||
forbiddenPaths,
|
||||
additionalPermissions: mergedAdditional,
|
||||
includeDirectories: this.options.includeDirectories || [],
|
||||
maskFilePath: this.getMaskFilePath(),
|
||||
isWriteCommand: req.command === '__write',
|
||||
});
|
||||
|
||||
const bpfPath = getSeccompBpfPath();
|
||||
|
||||
bwrapArgs.push('--seccomp', '9');
|
||||
bwrapArgs.push('--', finalCommand, ...finalArgs);
|
||||
|
||||
const argsPath = this.writeArgsToTempFile(bwrapArgs);
|
||||
|
||||
const shArgs = [
|
||||
'-c',
|
||||
'bpf_path="$1"; shift; exec bwrap "$@" 9< "$bpf_path"',
|
||||
'bpf_path="$1"; args_path="$2"; shift 2; exec bwrap --args 8 "$@" 8< "$args_path" 9< "$bpf_path"',
|
||||
'_',
|
||||
bpfPath,
|
||||
...bwrapArgs,
|
||||
argsPath,
|
||||
'--',
|
||||
finalCommand,
|
||||
...finalArgs,
|
||||
];
|
||||
|
||||
return {
|
||||
@@ -416,70 +273,23 @@ export class LinuxSandboxManager implements SandboxManager {
|
||||
args: shArgs,
|
||||
env: sanitizedEnv,
|
||||
cwd: req.cwd,
|
||||
cleanup: () => {
|
||||
try {
|
||||
fs.unlinkSync(argsPath);
|
||||
} catch {
|
||||
// Ignore cleanup errors
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates bubblewrap arguments to mask secret files.
|
||||
*/
|
||||
private async getSecretFilesArgs(allowedPaths?: string[]): Promise<string[]> {
|
||||
const args: string[] = [];
|
||||
const maskPath = this.getMaskFilePath();
|
||||
const paths = sanitizePaths(allowedPaths) || [];
|
||||
const searchDirs = new Set([this.options.workspace, ...paths]);
|
||||
const findPatterns = getSecretFileFindArgs();
|
||||
|
||||
for (const dir of searchDirs) {
|
||||
try {
|
||||
// Use the native 'find' command for performance and to catch nested secrets.
|
||||
// We limit depth to 3 to keep it fast while covering common nested structures.
|
||||
// We use -prune to skip heavy directories efficiently while matching dotfiles.
|
||||
const findResult = await spawnAsync('find', [
|
||||
dir,
|
||||
'-maxdepth',
|
||||
'3',
|
||||
'-type',
|
||||
'd',
|
||||
'(',
|
||||
'-name',
|
||||
'.git',
|
||||
'-o',
|
||||
'-name',
|
||||
'node_modules',
|
||||
'-o',
|
||||
'-name',
|
||||
'.venv',
|
||||
'-o',
|
||||
'-name',
|
||||
'__pycache__',
|
||||
'-o',
|
||||
'-name',
|
||||
'dist',
|
||||
'-o',
|
||||
'-name',
|
||||
'build',
|
||||
')',
|
||||
'-prune',
|
||||
'-o',
|
||||
'-type',
|
||||
'f',
|
||||
...findPatterns,
|
||||
'-print0',
|
||||
]);
|
||||
|
||||
const files = findResult.stdout.toString().split('\0');
|
||||
for (const file of files) {
|
||||
if (file.trim()) {
|
||||
args.push('--bind', maskPath, file.trim());
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
debugLogger.log(
|
||||
`LinuxSandboxManager: Failed to find or mask secret files in ${dir}`,
|
||||
e,
|
||||
);
|
||||
}
|
||||
}
|
||||
return args;
|
||||
private writeArgsToTempFile(args: string[]): string {
|
||||
const tempFile = join(
|
||||
os.tmpdir(),
|
||||
`gemini-cli-bwrap-args-${Date.now()}-${Math.random().toString(36).slice(2)}.args`,
|
||||
);
|
||||
const content = Buffer.from(args.join('\0') + '\0');
|
||||
fs.writeFileSync(tempFile, content, { mode: 0o600 });
|
||||
return tempFile;
|
||||
}
|
||||
}
|
||||
|
||||
296
packages/core/src/sandbox/linux/bwrapArgsBuilder.test.ts
Normal file
296
packages/core/src/sandbox/linux/bwrapArgsBuilder.test.ts
Normal file
@@ -0,0 +1,296 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2026 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import { buildBwrapArgs, type BwrapArgsOptions } from './bwrapArgsBuilder.js';
|
||||
import fs from 'node:fs';
|
||||
import * as shellUtils from '../../utils/shell-utils.js';
|
||||
|
||||
vi.mock('node:fs', async () => {
|
||||
const actual = await vi.importActual<typeof import('node:fs')>('node:fs');
|
||||
return {
|
||||
...actual,
|
||||
default: {
|
||||
// @ts-expect-error - Property 'default' does not exist on type 'typeof import("node:fs")'
|
||||
...actual.default,
|
||||
existsSync: vi.fn(() => true),
|
||||
realpathSync: vi.fn((p) => p.toString()),
|
||||
statSync: vi.fn(() => ({ isDirectory: () => true }) as fs.Stats),
|
||||
mkdirSync: vi.fn(),
|
||||
mkdtempSync: vi.fn((prefix: string) => prefix + 'mocked'),
|
||||
openSync: vi.fn(),
|
||||
closeSync: vi.fn(),
|
||||
writeFileSync: vi.fn(),
|
||||
readdirSync: vi.fn(() => []),
|
||||
chmodSync: vi.fn(),
|
||||
unlinkSync: vi.fn(),
|
||||
rmSync: vi.fn(),
|
||||
},
|
||||
existsSync: vi.fn(() => true),
|
||||
realpathSync: vi.fn((p) => p.toString()),
|
||||
statSync: vi.fn(() => ({ isDirectory: () => true }) as fs.Stats),
|
||||
mkdirSync: vi.fn(),
|
||||
mkdtempSync: vi.fn((prefix: string) => prefix + 'mocked'),
|
||||
openSync: vi.fn(),
|
||||
closeSync: vi.fn(),
|
||||
writeFileSync: vi.fn(),
|
||||
readdirSync: vi.fn(() => []),
|
||||
chmodSync: vi.fn(),
|
||||
unlinkSync: vi.fn(),
|
||||
rmSync: vi.fn(),
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock('../../utils/shell-utils.js', async (importOriginal) => {
|
||||
const actual =
|
||||
await importOriginal<typeof import('../../utils/shell-utils.js')>();
|
||||
return {
|
||||
...actual,
|
||||
spawnAsync: vi.fn(() =>
|
||||
Promise.resolve({ status: 0, stdout: Buffer.from('') }),
|
||||
),
|
||||
initializeShellParsers: vi.fn(),
|
||||
isStrictlyApproved: vi.fn().mockResolvedValue(true),
|
||||
};
|
||||
});
|
||||
|
||||
describe('buildBwrapArgs', () => {
  const workspace = '/home/user/workspace';

  beforeEach(() => {
    vi.clearAllMocks();
    // Default filesystem for each test: every path exists and every
    // realpath resolves to itself (no symlinks).
    vi.mocked(fs.existsSync).mockReturnValue(true);
    vi.mocked(fs.realpathSync).mockImplementation((p) => p.toString());
  });

  afterEach(() => {
    vi.restoreAllMocks();
  });

  // Minimal baseline: read-only workspace, no network, no extra paths.
  const defaultOptions: BwrapArgsOptions = {
    workspace,
    workspaceWrite: false,
    networkAccess: false,
    allowedPaths: [],
    forbiddenPaths: [],
    additionalPermissions: {},
    includeDirectories: [],
    maskFilePath: '/tmp/mask',
    isWriteCommand: false,
  };

  it('should correctly format the base arguments', async () => {
    const args = await buildBwrapArgs(defaultOptions);

    // The exact base layout: namespace isolation, read-only root,
    // fresh /dev, /proc and /tmp, read-only workspace, then the
    // governance files pinned read-only.
    expect(args).toEqual([
      '--unshare-all',
      '--new-session',
      '--die-with-parent',
      '--ro-bind',
      '/',
      '/',
      '--dev',
      '/dev',
      '--proc',
      '/proc',
      '--tmpfs',
      '/tmp',
      '--ro-bind-try',
      workspace,
      workspace,
      '--ro-bind',
      `${workspace}/.gitignore`,
      `${workspace}/.gitignore`,
      '--ro-bind',
      `${workspace}/.geminiignore`,
      `${workspace}/.geminiignore`,
      '--ro-bind',
      `${workspace}/.git`,
      `${workspace}/.git`,
    ]);
  });

  it('binds workspace read-write when workspaceWrite is true', async () => {
    const args = await buildBwrapArgs({
      ...defaultOptions,
      workspaceWrite: true,
    });

    // Read-write workspaces use --bind-try instead of --ro-bind-try.
    expect(args).toContain('--bind-try');
    const bindIndex = args.indexOf('--bind-try');
    expect(args[bindIndex + 1]).toBe(workspace);
  });

  it('maps network permissions to --share-net', async () => {
    const args = await buildBwrapArgs({
      ...defaultOptions,
      networkAccess: true,
    });

    expect(args).toContain('--share-net');
  });

  it('maps explicit write permissions to --bind-try', async () => {
    const args = await buildBwrapArgs({
      ...defaultOptions,
      additionalPermissions: {
        fileSystem: { write: ['/home/user/workspace/out/dir'] },
      },
    });

    // fileSystem.write entries become read-write binds.
    const index = args.indexOf('--bind-try');
    expect(index).not.toBe(-1);
    expect(args[index + 1]).toBe('/home/user/workspace/out/dir');
  });

  it('should protect both the symlink and the real path of governance files', async () => {
    // .gitignore is a symlink to a shared global file.
    vi.mocked(fs.realpathSync).mockImplementation((p) => {
      if (p.toString() === `${workspace}/.gitignore`)
        return '/shared/global.gitignore';
      return p.toString();
    });

    const args = await buildBwrapArgs(defaultOptions);

    // Both the symlink path and its target must be pinned read-only.
    expect(args).toContain('--ro-bind');
    expect(args).toContain(`${workspace}/.gitignore`);
    expect(args).toContain('/shared/global.gitignore');
  });

  it('should parameterize allowed paths and normalize them', async () => {
    const args = await buildBwrapArgs({
      ...defaultOptions,
      allowedPaths: ['/tmp/cache', '/opt/tools', workspace],
    });

    // Each allowed path becomes a read-write bind; the workspace entry
    // is deduplicated (it is already bound).
    expect(args).toContain('--bind-try');
    expect(args[args.indexOf('/tmp/cache') - 1]).toBe('--bind-try');
    expect(args[args.indexOf('/opt/tools') - 1]).toBe('--bind-try');
  });

  it('should bind the parent directory of a non-existent path', async () => {
    // The target file does not exist yet; its parent does.
    vi.mocked(fs.existsSync).mockImplementation((p) => {
      if (p === '/home/user/workspace/new-file.txt') return false;
      return true;
    });

    const args = await buildBwrapArgs({
      ...defaultOptions,
      allowedPaths: ['/home/user/workspace/new-file.txt'],
      isWriteCommand: true,
    });

    // The parent is bound read-write so the file can be created.
    const parentDir = '/home/user/workspace';
    const bindIndex = args.lastIndexOf(parentDir);
    expect(bindIndex).not.toBe(-1);
    expect(args[bindIndex - 2]).toBe('--bind-try');
  });

  it('should parameterize forbidden paths and explicitly deny them', async () => {
    // 'cache' paths are directories; everything else is a file.
    vi.mocked(fs.statSync).mockImplementation((p) => {
      if (p.toString().includes('cache')) {
        return { isDirectory: () => true } as fs.Stats;
      }
      return { isDirectory: () => false } as fs.Stats;
    });

    const args = await buildBwrapArgs({
      ...defaultOptions,
      forbiddenPaths: ['/tmp/cache', '/opt/secret.txt'],
    });

    // Directories are masked with an empty tmpfs ...
    const cacheIndex = args.indexOf('/tmp/cache');
    expect(args[cacheIndex - 1]).toBe('--tmpfs');

    // ... while files are shadowed by /dev/null.
    const secretIndex = args.indexOf('/opt/secret.txt');
    expect(args[secretIndex - 2]).toBe('--ro-bind');
    expect(args[secretIndex - 1]).toBe('/dev/null');
  });

  it('resolves forbidden symlink paths to their real paths', async () => {
    vi.mocked(fs.statSync).mockImplementation(
      () => ({ isDirectory: () => false }) as fs.Stats,
    );
    vi.mocked(fs.realpathSync).mockImplementation((p) => {
      if (p === '/tmp/forbidden-symlink') return '/opt/real-target.txt';
      return p.toString();
    });

    const args = await buildBwrapArgs({
      ...defaultOptions,
      forbiddenPaths: ['/tmp/forbidden-symlink'],
    });

    // The symlink's target (not the link itself) is denied.
    const secretIndex = args.indexOf('/opt/real-target.txt');
    expect(args[secretIndex - 2]).toBe('--ro-bind');
    expect(args[secretIndex - 1]).toBe('/dev/null');
  });

  it('masks directory symlinks with tmpfs for both paths', async () => {
    vi.mocked(fs.statSync).mockImplementation(
      () => ({ isDirectory: () => true }) as fs.Stats,
    );
    vi.mocked(fs.realpathSync).mockImplementation((p) => {
      if (p === '/tmp/dir-link') return '/opt/real-dir';
      return p.toString();
    });

    const args = await buildBwrapArgs({
      ...defaultOptions,
      forbiddenPaths: ['/tmp/dir-link'],
    });

    const idx = args.indexOf('/opt/real-dir');
    expect(args[idx - 1]).toBe('--tmpfs');
  });

  it('should override allowed paths if a path is also in forbidden paths', async () => {
    vi.mocked(fs.statSync).mockImplementation(
      () => ({ isDirectory: () => true }) as fs.Stats,
    );

    const args = await buildBwrapArgs({
      ...defaultOptions,
      forbiddenPaths: ['/tmp/conflict'],
      allowedPaths: ['/tmp/conflict'],
    });

    const bindIndex = args.findIndex(
      (a, i) => a === '--bind-try' && args[i + 1] === '/tmp/conflict',
    );
    const tmpfsIndex = args.findIndex(
      (a, i) => a === '--tmpfs' && args[i + 1] === '/tmp/conflict',
    );

    // The deny (tmpfs) must come after the allow (bind) so that, with
    // bwrap's last-mount-wins ordering, the forbidden path stays masked.
    expect(bindIndex).toBeGreaterThan(-1);
    expect(tmpfsIndex).toBeGreaterThan(bindIndex);
    expect(args[tmpfsIndex + 1]).toBe('/tmp/conflict');
  });

  it('blocks .env and .env.* files', async () => {
    // Simulate `find` discovering two secret files in the workspace
    // (NUL-separated, as produced by -print0).
    vi.mocked(shellUtils.spawnAsync).mockImplementation((cmd, args) => {
      if (cmd === 'find' && args?.[0] === workspace) {
        return Promise.resolve({
          status: 0,
          stdout: Buffer.from(`${workspace}/.env\0${workspace}/.env.local\0`),
        } as unknown as ReturnType<typeof shellUtils.spawnAsync>);
      }
      return Promise.resolve({
        status: 0,
        stdout: Buffer.from(''),
      } as unknown as ReturnType<typeof shellUtils.spawnAsync>);
    });

    const args = await buildBwrapArgs(defaultOptions);

    expect(args).toContain(`${workspace}/.env`);
    expect(args).toContain(`${workspace}/.env.local`);

    // Each secret file is shadowed by the mask file.
    const envIndex = args.indexOf(`${workspace}/.env`);
    expect(args[envIndex - 2]).toBe('--bind');
    expect(args[envIndex - 1]).toBe('/tmp/mask');
  });
});
|
||||
263
packages/core/src/sandbox/linux/bwrapArgsBuilder.ts
Normal file
263
packages/core/src/sandbox/linux/bwrapArgsBuilder.ts
Normal file
@@ -0,0 +1,263 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2026 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import fs from 'node:fs';
|
||||
import { join, dirname, normalize } from 'node:path';
|
||||
import {
|
||||
type SandboxPermissions,
|
||||
GOVERNANCE_FILES,
|
||||
getSecretFileFindArgs,
|
||||
sanitizePaths,
|
||||
} from '../../services/sandboxManager.js';
|
||||
import {
|
||||
tryRealpath,
|
||||
resolveGitWorktreePaths,
|
||||
isErrnoException,
|
||||
} from '../utils/fsUtils.js';
|
||||
import { spawnAsync } from '../../utils/shell-utils.js';
|
||||
import { debugLogger } from '../../utils/debugLogger.js';
|
||||
|
||||
/**
 * Options for building bubblewrap (bwrap) arguments.
 */
export interface BwrapArgsOptions {
  /** Absolute path of the workspace directory to expose in the sandbox. */
  workspace: string;
  /** When true, the workspace is bound read-write instead of read-only. */
  workspaceWrite: boolean;
  /** When true, the sandbox shares the host network (`--share-net`). */
  networkAccess: boolean;
  /** Extra paths to bind read-write (e.g. command output targets). */
  allowedPaths: string[];
  /** Paths to deny: masked with tmpfs (dirs) or /dev/null (files). */
  forbiddenPaths: string[];
  /** Fine-grained extras: network flag plus fileSystem read/write lists. */
  additionalPermissions: SandboxPermissions;
  /** Additional directories to expose read-only inside the sandbox. */
  includeDirectories: string[];
  /** File bind-mounted over secret files (.env and .env.*) to hide them. */
  maskFilePath: string;
  /** When true, parents of not-yet-existing allowed paths are bound RW. */
  isWriteCommand: boolean;
}
|
||||
|
||||
/**
|
||||
* Builds the list of bubblewrap arguments based on the provided options.
|
||||
*/
|
||||
export async function buildBwrapArgs(
|
||||
options: BwrapArgsOptions,
|
||||
): Promise<string[]> {
|
||||
const bwrapArgs: string[] = [
|
||||
'--unshare-all',
|
||||
'--new-session', // Isolate session
|
||||
'--die-with-parent', // Prevent orphaned runaway processes
|
||||
];
|
||||
|
||||
if (options.networkAccess || options.additionalPermissions.network) {
|
||||
bwrapArgs.push('--share-net');
|
||||
}
|
||||
|
||||
bwrapArgs.push(
|
||||
'--ro-bind',
|
||||
'/',
|
||||
'/',
|
||||
'--dev', // Creates a safe, minimal /dev (replaces --dev-bind)
|
||||
'/dev',
|
||||
'--proc', // Creates a fresh procfs for the unshared PID namespace
|
||||
'/proc',
|
||||
'--tmpfs', // Provides an isolated, writable /tmp directory
|
||||
'/tmp',
|
||||
);
|
||||
|
||||
const workspacePath = tryRealpath(options.workspace);
|
||||
|
||||
const bindFlag = options.workspaceWrite ? '--bind-try' : '--ro-bind-try';
|
||||
|
||||
if (options.workspaceWrite) {
|
||||
bwrapArgs.push('--bind-try', options.workspace, options.workspace);
|
||||
if (workspacePath !== options.workspace) {
|
||||
bwrapArgs.push('--bind-try', workspacePath, workspacePath);
|
||||
}
|
||||
} else {
|
||||
bwrapArgs.push('--ro-bind-try', options.workspace, options.workspace);
|
||||
if (workspacePath !== options.workspace) {
|
||||
bwrapArgs.push('--ro-bind-try', workspacePath, workspacePath);
|
||||
}
|
||||
}
|
||||
|
||||
const { worktreeGitDir, mainGitDir } = resolveGitWorktreePaths(workspacePath);
|
||||
if (worktreeGitDir) {
|
||||
bwrapArgs.push(bindFlag, worktreeGitDir, worktreeGitDir);
|
||||
}
|
||||
if (mainGitDir) {
|
||||
bwrapArgs.push(bindFlag, mainGitDir, mainGitDir);
|
||||
}
|
||||
|
||||
const includeDirs = sanitizePaths(options.includeDirectories);
|
||||
for (const includeDir of includeDirs) {
|
||||
try {
|
||||
const resolved = tryRealpath(includeDir);
|
||||
bwrapArgs.push('--ro-bind-try', resolved, resolved);
|
||||
} catch {
|
||||
// Ignore
|
||||
}
|
||||
}
|
||||
|
||||
const normalizedWorkspace = normalize(workspacePath).replace(/\/$/, '');
|
||||
for (const allowedPath of options.allowedPaths) {
|
||||
const resolved = tryRealpath(allowedPath);
|
||||
if (!fs.existsSync(resolved)) {
|
||||
// If the path doesn't exist, we still want to allow access to its parent
|
||||
// if it's explicitly allowed, to enable creating it.
|
||||
try {
|
||||
const resolvedParent = tryRealpath(dirname(resolved));
|
||||
bwrapArgs.push(
|
||||
options.isWriteCommand ? '--bind-try' : bindFlag,
|
||||
resolvedParent,
|
||||
resolvedParent,
|
||||
);
|
||||
} catch {
|
||||
// Ignore
|
||||
}
|
||||
continue;
|
||||
}
|
||||
const normalizedAllowedPath = normalize(resolved).replace(/\/$/, '');
|
||||
if (normalizedAllowedPath !== normalizedWorkspace) {
|
||||
bwrapArgs.push('--bind-try', resolved, resolved);
|
||||
}
|
||||
}
|
||||
|
||||
const additionalReads = sanitizePaths(
|
||||
options.additionalPermissions.fileSystem?.read,
|
||||
);
|
||||
for (const p of additionalReads) {
|
||||
try {
|
||||
const safeResolvedPath = tryRealpath(p);
|
||||
bwrapArgs.push('--ro-bind-try', safeResolvedPath, safeResolvedPath);
|
||||
} catch (e: unknown) {
|
||||
debugLogger.warn(e instanceof Error ? e.message : String(e));
|
||||
}
|
||||
}
|
||||
|
||||
const additionalWrites = sanitizePaths(
|
||||
options.additionalPermissions.fileSystem?.write,
|
||||
);
|
||||
for (const p of additionalWrites) {
|
||||
try {
|
||||
const safeResolvedPath = tryRealpath(p);
|
||||
bwrapArgs.push('--bind-try', safeResolvedPath, safeResolvedPath);
|
||||
} catch (e: unknown) {
|
||||
debugLogger.warn(e instanceof Error ? e.message : String(e));
|
||||
}
|
||||
}
|
||||
|
||||
for (const file of GOVERNANCE_FILES) {
|
||||
const filePath = join(options.workspace, file.path);
|
||||
const realPath = tryRealpath(filePath);
|
||||
bwrapArgs.push('--ro-bind', filePath, filePath);
|
||||
if (realPath !== filePath) {
|
||||
bwrapArgs.push('--ro-bind', realPath, realPath);
|
||||
}
|
||||
}
|
||||
|
||||
for (const p of options.forbiddenPaths) {
|
||||
let resolved: string;
|
||||
try {
|
||||
resolved = tryRealpath(p); // Forbidden paths should still resolve to block the real path
|
||||
if (!fs.existsSync(resolved)) continue;
|
||||
} catch (e: unknown) {
|
||||
debugLogger.warn(
|
||||
`Failed to resolve forbidden path ${p}: ${e instanceof Error ? e.message : String(e)}`,
|
||||
);
|
||||
bwrapArgs.push('--ro-bind', '/dev/null', p);
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
const stat = fs.statSync(resolved);
|
||||
if (stat.isDirectory()) {
|
||||
bwrapArgs.push('--tmpfs', resolved, '--remount-ro', resolved);
|
||||
} else {
|
||||
bwrapArgs.push('--ro-bind', '/dev/null', resolved);
|
||||
}
|
||||
} catch (e: unknown) {
|
||||
if (isErrnoException(e) && e.code === 'ENOENT') {
|
||||
bwrapArgs.push('--symlink', '/dev/null', resolved);
|
||||
} else {
|
||||
debugLogger.warn(
|
||||
`Failed to stat forbidden path ${resolved}: ${e instanceof Error ? e.message : String(e)}`,
|
||||
);
|
||||
bwrapArgs.push('--ro-bind', '/dev/null', resolved);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Mask secret files (.env, .env.*)
|
||||
const secretArgs = await getSecretFilesArgs(
|
||||
options.workspace,
|
||||
options.allowedPaths,
|
||||
options.maskFilePath,
|
||||
);
|
||||
bwrapArgs.push(...secretArgs);
|
||||
|
||||
return bwrapArgs;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates bubblewrap arguments to mask secret files.
|
||||
*/
|
||||
async function getSecretFilesArgs(
|
||||
workspace: string,
|
||||
allowedPaths: string[],
|
||||
maskPath: string,
|
||||
): Promise<string[]> {
|
||||
const args: string[] = [];
|
||||
const searchDirs = new Set([workspace, ...allowedPaths]);
|
||||
const findPatterns = getSecretFileFindArgs();
|
||||
|
||||
for (const dir of searchDirs) {
|
||||
try {
|
||||
// Use the native 'find' command for performance and to catch nested secrets.
|
||||
// We limit depth to 3 to keep it fast while covering common nested structures.
|
||||
// We use -prune to skip heavy directories efficiently while matching dotfiles.
|
||||
const findResult = await spawnAsync('find', [
|
||||
dir,
|
||||
'-maxdepth',
|
||||
'3',
|
||||
'-type',
|
||||
'd',
|
||||
'(',
|
||||
'-name',
|
||||
'.git',
|
||||
'-o',
|
||||
'-name',
|
||||
'node_modules',
|
||||
'-o',
|
||||
'-name',
|
||||
'.venv',
|
||||
'-o',
|
||||
'-name',
|
||||
'__pycache__',
|
||||
'-o',
|
||||
'-name',
|
||||
'dist',
|
||||
'-o',
|
||||
'-name',
|
||||
'build',
|
||||
')',
|
||||
'-prune',
|
||||
'-o',
|
||||
'-type',
|
||||
'f',
|
||||
...findPatterns,
|
||||
'-print0',
|
||||
]);
|
||||
|
||||
const files = findResult.stdout.toString().split('\0');
|
||||
for (const file of files) {
|
||||
if (file.trim()) {
|
||||
args.push('--bind', maskPath, file.trim());
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
debugLogger.log(
|
||||
`LinuxSandboxManager: Failed to find or mask secret files in ${dir}`,
|
||||
e,
|
||||
);
|
||||
}
|
||||
}
|
||||
return args;
|
||||
}
|
||||
Reference in New Issue
Block a user