mirror of
https://github.com/google-gemini/gemini-cli.git
synced 2026-05-16 06:43:07 -07:00
feat(workspaces): transform workspaces feature into a distributable extension
This commit is contained in:
@@ -0,0 +1,95 @@
|
||||
/**
|
||||
* Shared Task Runner Utility
|
||||
* Handles parallel process execution, log streaming, and dashboard rendering.
|
||||
*/
|
||||
import { spawn } from 'child_process';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
|
||||
export interface Task {
|
||||
id: string;
|
||||
name: string;
|
||||
cmd: string;
|
||||
dep?: string;
|
||||
condition?: 'success' | 'fail';
|
||||
}
|
||||
|
||||
export class TaskRunner {
|
||||
private state: Record<string, { status: string; exitCode?: number }> = {};
|
||||
private tasks: Task[] = [];
|
||||
private logDir: string;
|
||||
private header: string;
|
||||
|
||||
constructor(logDir: string, header: string) {
|
||||
this.logDir = logDir;
|
||||
this.header = header;
|
||||
fs.mkdirSync(logDir, { recursive: true });
|
||||
}
|
||||
|
||||
register(tasks: Task[]) {
|
||||
this.tasks = tasks;
|
||||
tasks.forEach(t => this.state[t.id] = { status: 'PENDING' });
|
||||
}
|
||||
|
||||
async run() {
|
||||
const runQueue = this.tasks.filter(t => !t.dep);
|
||||
runQueue.forEach(t => this.execute(t));
|
||||
|
||||
return new Promise((resolve) => {
|
||||
const checkInterval = setInterval(() => {
|
||||
const allDone = this.tasks.every(t =>
|
||||
['SUCCESS', 'FAILED', 'SKIPPED'].includes(this.state[t.id].status)
|
||||
);
|
||||
|
||||
if (allDone) {
|
||||
clearInterval(checkInterval);
|
||||
console.log('\n✨ All tasks complete.');
|
||||
resolve(this.state);
|
||||
}
|
||||
|
||||
// Check for dependencies
|
||||
this.tasks.filter(t => t.dep && this.state[t.id].status === 'PENDING').forEach(t => {
|
||||
const parent = this.state[t.dep!];
|
||||
if (parent.status === 'SUCCESS' && (!t.condition || t.condition === 'success')) {
|
||||
this.execute(t);
|
||||
} else if (parent.status === 'FAILED' && t.condition === 'fail') {
|
||||
this.execute(t);
|
||||
} else if (['SUCCESS', 'FAILED'].includes(parent.status)) {
|
||||
this.state[t.id].status = 'SKIPPED';
|
||||
}
|
||||
});
|
||||
|
||||
this.render();
|
||||
}, 1500);
|
||||
});
|
||||
}
|
||||
|
||||
private execute(task: Task) {
|
||||
this.state[task.id].status = 'RUNNING';
|
||||
const proc = spawn(task.cmd, { shell: true, env: { ...process.env, FORCE_COLOR: '1' } });
|
||||
|
||||
const logStream = fs.createWriteStream(path.join(this.logDir, `${task.id}.log`));
|
||||
proc.stdout.pipe(logStream);
|
||||
proc.stderr.pipe(logStream);
|
||||
|
||||
proc.on('close', (code) => {
|
||||
const exitCode = code ?? 0;
|
||||
this.state[task.id].status = exitCode === 0 ? 'SUCCESS' : 'FAILED';
|
||||
this.state[task.id].exitCode = exitCode;
|
||||
fs.writeFileSync(path.join(this.logDir, `${task.id}.exit`), exitCode.toString());
|
||||
});
|
||||
}
|
||||
|
||||
private render() {
|
||||
console.clear();
|
||||
console.log('==================================================');
|
||||
console.log(this.header);
|
||||
console.log('==================================================\n');
|
||||
|
||||
this.tasks.forEach(t => {
|
||||
const s = this.state[t.id];
|
||||
const icon = s.status === 'SUCCESS' ? '✅' : s.status === 'FAILED' ? '❌' : s.status === 'RUNNING' ? '⏳' : s.status === 'SKIPPED' ? '⏭️ ' : '💤';
|
||||
console.log(` ${icon} ${t.name.padEnd(20)}: ${s.status}`);
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,78 @@
|
||||
/**
|
||||
* Workspace Attach Utility (Local)
|
||||
*
|
||||
* Re-attaches to a running tmux session inside the container on the worker.
|
||||
*/
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import { spawnSync } from 'child_process';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { ProviderFactory } from './providers/ProviderFactory.ts';
|
||||
|
||||
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
||||
const REPO_ROOT = path.resolve(__dirname, '../../../..');
|
||||
|
||||
// Single-quote a string for safe interpolation into a POSIX shell command:
// wraps the value in single quotes, escaping embedded quotes as '\''.
const q = (str: string) => `'${str.split("'").join("'\\''")}'`;
|
||||
|
||||
export async function runAttach(args: string[], env: NodeJS.ProcessEnv = process.env) {
|
||||
const prNumber = args[0];
|
||||
const action = args[1] || 'review';
|
||||
const isLocal = args.includes('--local');
|
||||
|
||||
if (!prNumber) {
|
||||
console.error('Usage: npm run workspace:attach <PR_NUMBER> [action] [--local]');
|
||||
return 1;
|
||||
}
|
||||
|
||||
const settingsPath = path.join(REPO_ROOT, '.gemini/workspaces/settings.json');
|
||||
if (!fs.existsSync(settingsPath)) {
|
||||
console.error('❌ Settings not found. Run "npm run workspace:setup" first.');
|
||||
return 1;
|
||||
}
|
||||
const settings = JSON.parse(fs.readFileSync(settingsPath, 'utf8'));
|
||||
const config = settings.workspace;
|
||||
if (!config) {
|
||||
console.error('❌ Deep Review configuration not found.');
|
||||
return 1;
|
||||
}
|
||||
|
||||
const { projectId, zone } = config;
|
||||
const targetVM = `gcli-workspace-${env.USER || 'mattkorwel'}`;
|
||||
const provider = ProviderFactory.getProvider({ projectId, zone, instanceName: targetVM });
|
||||
|
||||
const sessionName = `workspace-${prNumber}-${action}`;
|
||||
const containerAttach = `sudo docker exec -it maintainer-worker sh -c ${q(`tmux attach-session -t ${sessionName}`)}`;
|
||||
const finalSSH = provider.getRunCommand(containerAttach, { interactive: true });
|
||||
|
||||
console.log(`🔗 Attaching to session: ${sessionName}...`);
|
||||
|
||||
const isWithinGemini = !!env.GEMINI_CLI || !!env.GEMINI_SESSION_ID || !!env.GCLI_SESSION_ID;
|
||||
if (isWithinGemini && !isLocal) {
|
||||
const tempCmdPath = path.join(process.env.TMPDIR || '/tmp', `workspace-attach-${prNumber}.sh`);
|
||||
fs.writeFileSync(tempCmdPath, `#!/bin/bash\n${finalSSH}\nrm "$0"`, { mode: 0o755 });
|
||||
|
||||
const appleScript = `
|
||||
on run argv
|
||||
tell application "iTerm"
|
||||
tell current window
|
||||
set newTab to (create tab with default profile)
|
||||
tell current session of newTab
|
||||
write text (item 1 of argv) & return
|
||||
end tell
|
||||
end tell
|
||||
activate
|
||||
end tell
|
||||
end run
|
||||
`;
|
||||
spawnSync('osascript', ['-', tempCmdPath], { input: appleScript });
|
||||
console.log(`✅ iTerm2 tab opened for ${sessionName}.`);
|
||||
return 0;
|
||||
}
|
||||
|
||||
spawnSync(finalSSH, { stdio: 'inherit', shell: true });
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (import.meta.url === `file://${process.argv[1]}`) {
|
||||
runAttach(process.argv.slice(2)).catch(console.error);
|
||||
}
|
||||
@@ -0,0 +1,70 @@
|
||||
import { spawnSync } from 'child_process';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { ProviderFactory } from './providers/ProviderFactory.ts';
|
||||
|
||||
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
||||
const REPO_ROOT = path.resolve(__dirname, '../../../..');
|
||||
|
||||
export async function runChecker(args: string[], env: NodeJS.ProcessEnv = process.env) {
|
||||
const prNumber = args[0];
|
||||
if (!prNumber) {
|
||||
console.error('Usage: npm run review:check <PR_NUMBER>');
|
||||
return 1;
|
||||
}
|
||||
|
||||
const settingsPath = path.join(REPO_ROOT, '.gemini/workspaces/settings.json');
|
||||
if (!fs.existsSync(settingsPath)) {
|
||||
console.error('❌ Settings not found. Run "npm run workspace:setup" first.');
|
||||
return 1;
|
||||
}
|
||||
const settings = JSON.parse(fs.readFileSync(settingsPath, 'utf8'));
|
||||
const config = settings.workspace;
|
||||
if (!config) {
|
||||
console.error('❌ Deep Review configuration not found.');
|
||||
return 1;
|
||||
}
|
||||
const { projectId, zone, remoteWorkDir } = config;
|
||||
const targetVM = `gcli-workspace-${env.USER || 'mattkorwel'}`;
|
||||
const provider = ProviderFactory.getProvider({ projectId, zone, instanceName: targetVM });
|
||||
|
||||
console.log(`🔍 Checking remote status for PR #${prNumber} on ${targetVM}...`);
|
||||
|
||||
const branchView = spawnSync('gh', ['pr', 'view', prNumber, '--json', 'headRefName', '-q', '.headRefName'], { shell: true });
|
||||
const branchName = branchView.stdout.toString().trim();
|
||||
const logDir = `${remoteWorkDir}/${branchName}/.gemini/logs/review-${prNumber}`;
|
||||
|
||||
const tasks = ['build', 'ci', 'review', 'verify'];
|
||||
let allDone = true;
|
||||
|
||||
console.log('\n--- Task Status ---');
|
||||
for (const task of tasks) {
|
||||
const exitFile = `${logDir}/${task}.exit`;
|
||||
const checkExit = await provider.getExecOutput(`[ -f ${exitFile} ] && cat ${exitFile}`, { wrapContainer: 'maintainer-worker' });
|
||||
|
||||
if (checkExit.status === 0 && checkExit.stdout.trim()) {
|
||||
const code = checkExit.stdout.trim();
|
||||
console.log(` ${code === '0' ? '✅' : '❌'} ${task.padEnd(10)}: ${code === '0' ? 'SUCCESS' : `FAILED (exit ${code})`}`);
|
||||
} else {
|
||||
const checkRunning = await provider.exec(`[ -f ${logDir}/${task}.log ]`, { wrapContainer: 'maintainer-worker' });
|
||||
if (checkRunning === 0) {
|
||||
console.log(` ⏳ ${task.padEnd(10)}: RUNNING`);
|
||||
} else {
|
||||
console.log(` 💤 ${task.padEnd(10)}: PENDING`);
|
||||
}
|
||||
allDone = false;
|
||||
}
|
||||
}
|
||||
|
||||
if (allDone) {
|
||||
console.log('\n✨ All remote tasks complete. You can now synthesize the results.');
|
||||
} else {
|
||||
console.log('\n⏳ Some tasks are still in progress. Check again in a few minutes.');
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (import.meta.url === `file://${process.argv[1]}`) {
|
||||
runChecker(process.argv.slice(2)).catch(console.error);
|
||||
}
|
||||
@@ -0,0 +1,99 @@
|
||||
/**
|
||||
* Universal Workspace Cleanup (Local)
|
||||
*
|
||||
* Surgical or full cleanup of sessions and worktrees on the GCE worker.
|
||||
* Refactored to use WorkerProvider for container compatibility.
|
||||
*/
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import { fileURLToPath } from 'url';
|
||||
import readline from 'readline';
|
||||
import { ProviderFactory } from './providers/ProviderFactory.ts';
|
||||
|
||||
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
||||
const REPO_ROOT = path.resolve(__dirname, '../../../..');
|
||||
|
||||
async function confirm(question: string): Promise<boolean> {
|
||||
const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
|
||||
return new Promise((resolve) => {
|
||||
rl.question(`${question} (y/n): `, (answer) => {
|
||||
rl.close();
|
||||
resolve(answer.trim().toLowerCase() === 'y');
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export async function runCleanup(args: string[], env: NodeJS.ProcessEnv = process.env) {
|
||||
const prNumber = args[0];
|
||||
const action = args[1];
|
||||
|
||||
const settingsPath = path.join(REPO_ROOT, '.gemini/workspaces/settings.json');
|
||||
if (!fs.existsSync(settingsPath)) {
|
||||
console.error('❌ Settings not found. Run "npm run workspace:setup" first.');
|
||||
return 1;
|
||||
}
|
||||
|
||||
const settings = JSON.parse(fs.readFileSync(settingsPath, 'utf8'));
|
||||
const config = settings.workspace;
|
||||
if (!config) {
|
||||
console.error('❌ Workspace configuration not found.');
|
||||
return 1;
|
||||
}
|
||||
|
||||
const { projectId, zone } = config;
|
||||
const targetVM = `gcli-workspace-${env.USER || 'mattkorwel'}`;
|
||||
const provider = ProviderFactory.getProvider({ projectId, zone, instanceName: targetVM });
|
||||
|
||||
if (prNumber && action) {
|
||||
const sessionName = `workspace-${prNumber}-${action}`;
|
||||
const worktreePath = `/home/node/.workspaces/worktrees/${sessionName}`;
|
||||
|
||||
console.log(`🧹 Surgically removing session and worktree for ${prNumber}-${action}...`);
|
||||
|
||||
// Kill specific tmux session inside container
|
||||
await provider.exec(`tmux kill-session -t ${sessionName} 2>/dev/null`, { wrapContainer: 'maintainer-worker' });
|
||||
|
||||
// Remove specific worktree inside container
|
||||
await provider.exec(`cd /home/node/.workspaces/main && git worktree remove -f ${worktreePath} 2>/dev/null && git worktree prune`, { wrapContainer: 'maintainer-worker' });
|
||||
|
||||
console.log(`✅ Cleaned up ${prNumber}-${action}.`);
|
||||
return 0;
|
||||
}
|
||||
|
||||
// --- Bulk Cleanup ---
|
||||
console.log(`⚠️ DANGER: You are about to perform a BULK cleanup on ${targetVM}.`);
|
||||
const confirmed = await confirm(' Are you sure you want to kill ALL sessions and worktrees?');
|
||||
if (!confirmed) {
|
||||
console.log('❌ Cleanup cancelled.');
|
||||
return 0;
|
||||
}
|
||||
|
||||
console.log(`🧹 Starting BULK cleanup...`);
|
||||
|
||||
// 1. Standard Cleanup
|
||||
console.log(' - Killing ALL remote tmux sessions...');
|
||||
await provider.exec(`tmux kill-server`, { wrapContainer: 'maintainer-worker' });
|
||||
|
||||
console.log(' - Cleaning up Docker resources...');
|
||||
await provider.exec(`sudo docker rm -f maintainer-worker || true`);
|
||||
await provider.exec(`sudo docker system prune -af --volumes`);
|
||||
|
||||
console.log(' - Cleaning up ALL Git Worktrees...');
|
||||
await provider.exec(`cd /home/node/.workspaces/main && git worktree prune && rm -rf /home/node/.workspaces/worktrees/*`, { wrapContainer: 'maintainer-worker' });
|
||||
|
||||
console.log('✅ Remote environment cleared.');
|
||||
|
||||
// 2. Full Wipe Option
|
||||
const shouldWipe = await confirm('\nWould you like to COMPLETELY wipe the remote workspace (main clone)?');
|
||||
|
||||
if (shouldWipe) {
|
||||
console.log(`🔥 Wiping /home/node/.workspaces/main...`);
|
||||
await provider.exec(`rm -rf /home/node/.workspaces/main && mkdir -p /home/node/.workspaces/main`, { wrapContainer: 'maintainer-worker' });
|
||||
console.log('✅ Remote hub wiped. You will need to run npm run workspace:setup again.');
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (import.meta.url === `file://${process.argv[1]}`) {
|
||||
runCleanup(process.argv.slice(2)).catch(console.error);
|
||||
}
|
||||
@@ -0,0 +1,61 @@
|
||||
/**
|
||||
* Deep Review Entrypoint (Remote)
|
||||
*
|
||||
* This script is the single command executed by the remote tmux session.
|
||||
* It handles environment loading and sequence orchestration.
|
||||
*/
|
||||
import { spawnSync } from 'child_process';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
||||
const prNumber = process.argv[2];
|
||||
const branchName = process.argv[3];
|
||||
const policyPath = process.argv[4];
|
||||
const ISOLATED_CONFIG = process.env.GEMINI_CLI_HOME || path.join(process.env.HOME || '', '.workspaces/gemini-cli-config');
|
||||
|
||||
async function main() {
|
||||
if (!prNumber || !branchName || !policyPath) {
|
||||
console.error('Usage: tsx entrypoint.ts <PR_NUMBER> <BRANCH_NAME> <POLICY_PATH>');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const workDir = process.cwd(); // This is remoteWorkDir as set in review.ts
|
||||
const targetDir = path.join(workDir, branchName);
|
||||
|
||||
// Use global tools pre-installed in the maintainer image
|
||||
const tsxBin = 'tsx';
|
||||
const geminiBin = 'gemini';
|
||||
|
||||
const action = process.argv[5] || 'review';
|
||||
|
||||
// 1. Run the Parallel Reviewer
|
||||
console.log('🚀 Launching Parallel Review Worker...');
|
||||
console.log(` - Script: ${path.join(__dirname, 'worker.ts')}`);
|
||||
console.log(` - Action: ${action}`);
|
||||
|
||||
const workerResult = spawnSync(tsxBin, [path.join(__dirname, 'worker.ts'), prNumber, branchName, policyPath, action], {
|
||||
stdio: 'inherit',
|
||||
env: { ...process.env, GEMINI_CLI_HOME: ISOLATED_CONFIG }
|
||||
});
|
||||
|
||||
if (workerResult.status !== 0) {
|
||||
console.error(`❌ Worker failed with exit code ${workerResult.status}.`);
|
||||
if (workerResult.error) console.error(' Error:', workerResult.error.message);
|
||||
}
|
||||
|
||||
// 2. Launch the Interactive Gemini Session (Local Nightly)
|
||||
console.log('\n✨ Verification complete. Joining interactive session...');
|
||||
|
||||
const geminiArgs = ['--policy', policyPath];
|
||||
geminiArgs.push('-p', `Review for PR #${prNumber} is complete. Read the logs in .gemini/logs/review-${prNumber}/ and synthesize your findings.`);
|
||||
|
||||
process.chdir(targetDir);
|
||||
spawnSync(geminiBin, geminiArgs, {
|
||||
stdio: 'inherit',
|
||||
env: { ...process.env, GEMINI_CLI_HOME: ISOLATED_CONFIG }
|
||||
});
|
||||
}
|
||||
|
||||
main().catch(console.error);
|
||||
@@ -0,0 +1,117 @@
|
||||
/**
|
||||
* Workspace Fleet Manager
|
||||
*
|
||||
* Manages dynamic GCP workers for workspaces tasks.
|
||||
*/
|
||||
import { spawnSync } from 'child_process';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { ProviderFactory } from './providers/ProviderFactory.ts';
|
||||
|
||||
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
||||
const REPO_ROOT = path.resolve(__dirname, '../../../..');
|
||||
|
||||
const USER = process.env.USER || 'mattkorwel';
|
||||
const INSTANCE_PREFIX = `gcli-workspace-${USER}`;
|
||||
const DEFAULT_ZONE = 'us-west1-a';
|
||||
|
||||
function getProjectId(): string {
|
||||
const settingsPath = path.join(REPO_ROOT, '.gemini/workspaces/settings.json');
|
||||
if (fs.existsSync(settingsPath)) {
|
||||
try {
|
||||
const settings = JSON.parse(fs.readFileSync(settingsPath, 'utf8'));
|
||||
return settings.workspace?.projectId;
|
||||
} catch (e) {}
|
||||
}
|
||||
return process.env.GOOGLE_CLOUD_PROJECT || '';
|
||||
}
|
||||
|
||||
async function listWorkers() {
|
||||
const projectId = getProjectId();
|
||||
if (!projectId) {
|
||||
console.error('❌ Project ID not found. Run "npm run workspace:setup" first.');
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`🔍 Listing Workspace Workers for ${USER} in ${projectId}...`);
|
||||
|
||||
spawnSync('gcloud', [
|
||||
'compute', 'instances', 'list',
|
||||
'--project', projectId,
|
||||
'--filter', `name~^${INSTANCE_PREFIX}`,
|
||||
'--format', 'table(name,zone,status,networkInterfaces[0].networkIP:label=INTERNAL_IP,creationTimestamp)'
|
||||
], { stdio: 'inherit' });
|
||||
}
|
||||
|
||||
async function provisionWorker() {
|
||||
const projectId = getProjectId();
|
||||
if (!projectId) {
|
||||
console.error('❌ Project ID not found. Run "npm run workspace:setup" first.');
|
||||
return;
|
||||
}
|
||||
|
||||
const provider = ProviderFactory.getProvider({
|
||||
projectId: projectId,
|
||||
zone: DEFAULT_ZONE,
|
||||
instanceName: INSTANCE_PREFIX
|
||||
});
|
||||
|
||||
const status = await provider.getStatus();
|
||||
if (status.status !== 'UNKNOWN' && status.status !== 'ERROR') {
|
||||
console.log(`✅ Worker ${INSTANCE_PREFIX} already exists and is ${status.status}.`);
|
||||
return;
|
||||
}
|
||||
|
||||
await provider.provision();
|
||||
}
|
||||
|
||||
async function stopWorker() {
|
||||
const projectId = getProjectId();
|
||||
const provider = ProviderFactory.getProvider({
|
||||
projectId: projectId,
|
||||
zone: DEFAULT_ZONE,
|
||||
instanceName: INSTANCE_PREFIX
|
||||
});
|
||||
|
||||
console.log(`🛑 Stopping workspace worker: ${INSTANCE_PREFIX}...`);
|
||||
await provider.stop();
|
||||
}
|
||||
|
||||
async function rebuildWorker() {
|
||||
const projectId = getProjectId();
|
||||
console.log(`🔥 Rebuilding worker ${INSTANCE_PREFIX}...`);
|
||||
|
||||
const knownHostsPath = path.join(REPO_ROOT, '.gemini/workspaces_known_hosts');
|
||||
if (fs.existsSync(knownHostsPath)) {
|
||||
console.log(` - Clearing isolated known_hosts...`);
|
||||
fs.unlinkSync(knownHostsPath);
|
||||
}
|
||||
|
||||
spawnSync('gcloud', ['compute', 'instances', 'delete', INSTANCE_PREFIX, '--project', projectId, '--zone', DEFAULT_ZONE, '--quiet'], { stdio: 'inherit' });
|
||||
await provisionWorker();
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const action = process.argv[2] || 'list';
|
||||
|
||||
switch (action) {
|
||||
case 'list':
|
||||
await listWorkers();
|
||||
break;
|
||||
case 'provision':
|
||||
await provisionWorker();
|
||||
break;
|
||||
case 'rebuild':
|
||||
await rebuildWorker();
|
||||
break;
|
||||
case 'stop':
|
||||
await stopWorker();
|
||||
break;
|
||||
default:
|
||||
console.error(`❌ Unknown fleet action: ${action}`);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
main().catch(console.error);
|
||||
@@ -0,0 +1,51 @@
|
||||
/**
|
||||
* Workspace Log Tailer (Local)
|
||||
*
|
||||
* Tails the latest remote logs for a specific job.
|
||||
*/
|
||||
import { spawnSync } from 'child_process';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
||||
const REPO_ROOT = path.resolve(__dirname, '../../../..');
|
||||
|
||||
export async function runLogs(args: string[]) {
|
||||
const prNumber = args[0];
|
||||
const action = args[1] || 'review';
|
||||
|
||||
if (!prNumber) {
|
||||
console.error('Usage: npm run workspace:logs <PR_NUMBER> [action]');
|
||||
return 1;
|
||||
}
|
||||
|
||||
const settingsPath = path.join(REPO_ROOT, '.gemini/settings.json');
|
||||
const settings = JSON.parse(fs.readFileSync(settingsPath, 'utf8'));
|
||||
const config = settings.maintainer?.workspace;
|
||||
const { remoteHost, remoteHome } = config;
|
||||
const sshConfigPath = path.join(REPO_ROOT, '.gemini/workspace_ssh_config');
|
||||
|
||||
const jobDir = `${remoteHome}/dev/worktrees/workspace-${prNumber}-${action}`;
|
||||
const logDir = `${jobDir}/.gemini/logs`;
|
||||
|
||||
console.log(`📋 Tailing latest logs for job ${prNumber}-${action}...`);
|
||||
|
||||
// Remote command to find the latest log file and tail it
|
||||
const tailCmd = `
|
||||
latest_log=$(ls -t ${logDir}/*.log 2>/dev/null | head -n 1)
|
||||
if [ -z "$latest_log" ]; then
|
||||
echo "❌ No logs found for this job yet."
|
||||
exit 1
|
||||
fi
|
||||
echo "📄 Tailing: $latest_log"
|
||||
tail -f "$latest_log"
|
||||
`;
|
||||
|
||||
spawnSync(`ssh -F ${sshConfigPath} ${remoteHost} ${JSON.stringify(tailCmd)}`, { stdio: 'inherit', shell: true });
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (import.meta.url === `file://${process.argv[1]}`) {
|
||||
runLogs(process.argv.slice(2)).catch(console.error);
|
||||
}
|
||||
@@ -0,0 +1,205 @@
|
||||
/**
|
||||
* Workspace Orchestrator (Local)
|
||||
*
|
||||
* Central coordination of remote tasks.
|
||||
* Wakes workers, prepares worktrees, and launches tmux sessions.
|
||||
*/
|
||||
import { spawnSync } from 'child_process';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { ProviderFactory } from './providers/ProviderFactory.ts';
|
||||
|
||||
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
||||
const REPO_ROOT = path.resolve(__dirname, '../../../..');
|
||||
|
||||
function q(str: string) {
|
||||
return `'${str.replace(/'/g, "'\\''")}'`;
|
||||
}
|
||||
|
||||
export async function runOrchestrator(args: string[], env: NodeJS.ProcessEnv = process.env) {
|
||||
let prNumber = args[0];
|
||||
let action = args[1] || 'review';
|
||||
|
||||
// Handle "shell" mode: npm run workspace:shell [identifier]
|
||||
const isShellMode = prNumber === 'shell';
|
||||
if (isShellMode) {
|
||||
prNumber = args[1] || `adhoc-${Math.floor(Math.random() * 10000)}`;
|
||||
action = 'shell';
|
||||
}
|
||||
|
||||
if (!prNumber) {
|
||||
console.error('❌ Usage: npm run workspace <PR_NUMBER> [action] OR npm run workspace:shell [identifier]');
|
||||
return 1;
|
||||
}
|
||||
|
||||
// 1. Load Settings
|
||||
const settingsPath = path.join(REPO_ROOT, '.gemini/workspaces/settings.json');
|
||||
if (!fs.existsSync(settingsPath)) {
|
||||
console.error('❌ Workspace settings not found. Run "npm run workspace:setup" first.');
|
||||
return 1;
|
||||
}
|
||||
const settings = JSON.parse(fs.readFileSync(settingsPath, 'utf8'));
|
||||
const config = settings.workspace;
|
||||
|
||||
const targetVM = `gcli-workspace-${env.USER || 'mattkorwel'}`;
|
||||
const provider = ProviderFactory.getProvider({ projectId: config.projectId, zone: config.zone, instanceName: targetVM });
|
||||
|
||||
// 2. Wake Worker & Verify Container
|
||||
await provider.ensureReady();
|
||||
|
||||
// Retrieve the remote user to ensure we run git commands correctly
|
||||
const whoamiRes = await provider.getExecOutput('whoami');
|
||||
const remoteUser = whoamiRes.stdout.trim();
|
||||
|
||||
// Paths - Unified across host and container
|
||||
const hostWorkspaceRoot = `/home/node/.workspaces`;
|
||||
const hostWorkDir = `${hostWorkspaceRoot}/main`;
|
||||
const containerHome = '/home/node';
|
||||
const containerWorkspaceRoot = `/home/node/.workspaces`;
|
||||
|
||||
const remotePolicyPath = `${containerWorkspaceRoot}/policies/workspace-policy.toml`;
|
||||
const persistentScripts = `${containerWorkspaceRoot}/scripts`;
|
||||
const sessionName = `workspace-${prNumber}-${action}`;
|
||||
const remoteWorktreeDir = `${containerWorkspaceRoot}/worktrees/${sessionName}`;
|
||||
const hostWorktreeDir = `${hostWorkspaceRoot}/worktrees/${sessionName}`;
|
||||
|
||||
// 3. Remote Context Setup (Executed on HOST for permission simplicity)
|
||||
console.log(`🚀 Preparing remote environment for ${action} on ${isShellMode ? 'branch/id' : '#'}${prNumber}...`);
|
||||
|
||||
// FIX: Use the host path to check for existence
|
||||
const check = await provider.getExecOutput(`ls -d ${hostWorktreeDir}/.git`);
|
||||
|
||||
// FIX: Ensure container user (node) owns the workspaces directories
|
||||
console.log(' - Synchronizing container permissions...');
|
||||
await provider.exec(`sudo chown -R 1000:1000 /home/node/.workspaces`);
|
||||
if (check.status !== 0) {
|
||||
console.log(` - Provisioning isolated git worktree for ${prNumber}...`);
|
||||
|
||||
// We run these on the host. Since setup might have left the repo root-owned, we use sudo.
|
||||
// We use environment variables to bypass safe.directory checks on a read-only filesystem.
|
||||
const gitEnv = `GIT_CONFIG_COUNT=1 GIT_CONFIG_KEY_0=safe.directory GIT_CONFIG_VALUE_0=${hostWorkDir}`;
|
||||
|
||||
const gitFetch = isShellMode
|
||||
? `sudo ${gitEnv} git -C ${hostWorkDir} fetch --quiet origin`
|
||||
: `sudo ${gitEnv} git -C ${hostWorkDir} fetch --quiet upstream pull/${prNumber}/head`;
|
||||
|
||||
const gitTarget = isShellMode ? 'FETCH_HEAD' : 'FETCH_HEAD';
|
||||
|
||||
const setupCmd = `
|
||||
sudo mkdir -p ${hostWorkspaceRoot}/worktrees && \
|
||||
sudo chown chronos:chronos ${hostWorkspaceRoot}/worktrees && \
|
||||
${gitFetch} && \
|
||||
sudo ${gitEnv} git -C ${hostWorkDir} worktree add --quiet -f ${hostWorktreeDir} ${gitTarget} 2>&1 && \
|
||||
sudo chown -R 1000:1000 ${hostWorkspaceRoot}
|
||||
`;
|
||||
const setupRes = await provider.getExecOutput(setupCmd);
|
||||
if (setupRes.status !== 0) {
|
||||
console.error(' ❌ Failed to provision remote worktree.');
|
||||
console.error(' STDOUT:', setupRes.stdout);
|
||||
console.error(' STDERR:', setupRes.stderr);
|
||||
return 1;
|
||||
}
|
||||
console.log(' ✅ Worktree provisioned successfully.');
|
||||
} else {
|
||||
console.log(' ✅ Remote worktree ready.');
|
||||
}
|
||||
|
||||
// AUTH: Dynamically retrieve credentials from host-side config/disk
|
||||
const remoteConfigPath = `${hostWorkspaceRoot}/gemini-cli-config/.gemini/settings.json`;
|
||||
const remoteSettingsRes = await provider.getExecOutput(`cat ${remoteConfigPath}`);
|
||||
const remoteSettingsJson = remoteSettingsRes.stdout.trim();
|
||||
|
||||
const apiKeyRes = await provider.getExecOutput(`cat ${remoteConfigPath} | grep apiKey | cut -d '\"' -f 4`);
|
||||
const remoteApiKey = apiKeyRes.stdout.trim();
|
||||
|
||||
const ghTokenRes = await provider.getExecOutput(`cat ${hostWorkspaceRoot}/.gh_token`);
|
||||
const remoteGhToken = ghTokenRes.stdout.trim();
|
||||
|
||||
// AUTH: Inject credentials and settings directly into the worktree
|
||||
console.log(' - Injecting remote authentication and UI context...');
|
||||
const dotEnvContent = `
|
||||
GEMINI_API_KEY=${remoteApiKey}
|
||||
COLORTERM=truecolor
|
||||
TERM=xterm-256color
|
||||
GEMINI_AUTO_UPDATE=0
|
||||
GEMINI_SANDBOX=workspace
|
||||
GEMINI_HOST=${targetVM}
|
||||
`.trim();
|
||||
await provider.exec(`sudo docker exec maintainer-worker sh -c ${q(`echo ${q(dotEnvContent)} > ${remoteWorktreeDir}/.env`)}`);
|
||||
|
||||
// Also inject the settings.json into the worktree's .gemini folder for maximum reliability
|
||||
await provider.exec(`sudo docker exec maintainer-worker sh -c ${q(`mkdir -p ${remoteWorktreeDir}/.gemini && echo ${q(remoteSettingsJson)} > ${remoteWorktreeDir}/.gemini/settings.json`)}`);
|
||||
|
||||
// 4. Execution Logic
|
||||
// In shell mode, we just start gemini. In action mode, we run the entrypoint.
|
||||
const remoteWorker = isShellMode
|
||||
? `gemini`
|
||||
: `tsx ${persistentScripts}/entrypoint.ts ${prNumber} . ${remotePolicyPath} ${action}`;
|
||||
|
||||
const authEnv = `-e GEMINI_AUTO_UPDATE=0 ${remoteApiKey ? `-e GEMINI_API_KEY=${remoteApiKey} ` : ''}${remoteGhToken ? `-e GITHUB_TOKEN=${remoteGhToken} -e GH_TOKEN=${remoteGhToken} ` : ''}`;
|
||||
|
||||
// PERSISTENCE: Wrap the entire execution in a tmux session inside the container
|
||||
// We HIDE the tmux status bar to reduce visual noise
|
||||
const tmuxStyle = `
|
||||
tmux set -g status off;
|
||||
`.replace(/\n/g, '');
|
||||
|
||||
const tmuxCmd = `tmux new-session -A -s ${sessionName} ${q(`${tmuxStyle} cd ${remoteWorktreeDir} && ${remoteWorker}; exec $SHELL`)}`;
|
||||
const containerWrap = `sudo docker exec -it -e COLORTERM=truecolor -e TERM=xterm-256color ${authEnv}maintainer-worker sh -c ${q(tmuxCmd)}`;
|
||||
|
||||
const finalSSH = provider.getRunCommand(containerWrap, { interactive: true });
|
||||
|
||||
const isWithinGemini = !!env.GEMINI_CLI || !!env.GEMINI_SESSION_ID || !!env.GCLI_SESSION_ID;
|
||||
|
||||
// 1.5 Handle --open override
|
||||
const openIdx = args.indexOf('--open');
|
||||
let terminalTarget = config.terminalTarget || 'tab';
|
||||
if (openIdx !== -1 && args[openIdx + 1]) {
|
||||
terminalTarget = args[openIdx + 1];
|
||||
}
|
||||
|
||||
const forceMainTerminal = terminalTarget === 'foreground';
|
||||
|
||||
if (!forceMainTerminal && isWithinGemini && env.TERM_PROGRAM === 'iTerm.app') {
|
||||
const tempCmdPath = path.join(process.env.TMPDIR || '/tmp', `workspace-ssh-${prNumber}.sh`);
|
||||
fs.writeFileSync(tempCmdPath, `#!/bin/bash\n${finalSSH}\nrm "$0"`, { mode: 0o755 });
|
||||
|
||||
const appleScript = terminalTarget === 'window' ? `
|
||||
on run argv
|
||||
tell application "iTerm"
|
||||
set newWindow to (create window with default profile)
|
||||
tell current session of newWindow
|
||||
write text (quoted form of item 1 of argv) & return
|
||||
end tell
|
||||
activate
|
||||
end tell
|
||||
end run
|
||||
` : `
|
||||
on run argv
|
||||
tell application "iTerm"
|
||||
tell current window
|
||||
set newTab to (create tab with default profile)
|
||||
tell current session of newTab
|
||||
write text (quoted form of item 1 of argv) & return
|
||||
end tell
|
||||
end tell
|
||||
activate
|
||||
end tell
|
||||
end run
|
||||
`;
|
||||
spawnSync('osascript', ['-', tempCmdPath], { input: appleScript });
|
||||
console.log(`✅ iTerm2 ${terminalTarget} opened for job ${prNumber}.`);
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Fallback: Run in current terminal
|
||||
console.log(`📡 Connecting to session ${sessionName}...`);
|
||||
spawnSync(finalSSH, { stdio: 'inherit', shell: true });
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (import.meta.url === `file://${process.argv[1]}`) {
|
||||
runOrchestrator(process.argv.slice(2)).catch(console.error);
|
||||
}
|
||||
@@ -0,0 +1,18 @@
|
||||
import { spawnSync } from 'child_process';
|
||||
import path from 'path';
|
||||
|
||||
export async function runFixPlaybook(prNumber: string, targetDir: string, policyPath: string, geminiBin: string) {
|
||||
console.log(`🚀 Workspace | FIX | PR #${prNumber}`);
|
||||
console.log('Switching to agentic fix loop inside Gemini CLI...');
|
||||
|
||||
// Use the nightly gemini binary to activate the fix-pr skill and iterate
|
||||
// Note: Gemini doesn't support --cwd, so the caller (worker.ts) must ensure we are already in targetDir
|
||||
const result = spawnSync(geminiBin, [
|
||||
'--policy', policyPath,
|
||||
'-p', `Please activate the 'fix-pr' skill and use it to iteratively fix PR #${prNumber}.
|
||||
Ensure you handle CI failures, merge conflicts, and unaddressed review comments
|
||||
until the PR is fully passing and mergeable.`
|
||||
], { stdio: 'inherit' });
|
||||
|
||||
return result?.status ?? 1;
|
||||
}
|
||||
@@ -0,0 +1,74 @@
|
||||
import { TaskRunner } from '../TaskRunner.js';
|
||||
import path from 'path';
|
||||
import { spawnSync } from 'child_process';
|
||||
|
||||
import { TaskRunner } from '../TaskRunner.js';
|
||||
import path from 'path';
|
||||
import { spawnSync } from 'child_process';
|
||||
import fs from 'fs';
|
||||
|
||||
/**
 * Runs the "implement" playbook: a supervisor loop that drives the Gemini CLI
 * to implement a GitHub issue test-first.
 *
 * Flow:
 *  1. Fetch issue metadata via `gh`, then ask Gemini to create a repro test
 *     at tests/repro_issue_<n>.test.ts that currently fails.
 *  2. Loop (up to 5 attempts): run that test with vitest; on failure, feed the
 *     tail of the test output back to Gemini and ask it to fix the source.
 *  3. On success, run the full `npm test` suite; if green, commit everything.
 *
 * @param issueNumber GitHub issue number (string form, as passed on the CLI).
 * @param workDir Checkout directory the agent modifies; used as cwd for all
 *   child processes and passed to gemini via --cwd.
 * @param policyPath Policy file forwarded to the gemini binary.
 * @param geminiBin Path/name of the gemini executable to invoke.
 * @returns 0 when the repro test and the full suite pass; 1 otherwise.
 */
export async function runImplementPlaybook(issueNumber: string, workDir: string, policyPath: string, geminiBin: string) {
  console.log(`🚀 Workspace | IMPLEMENT (Supervisor Loop) | Issue #${issueNumber}`);

  // Fetch the issue title/body as a small JSON object via gh's -q projection.
  // NOTE(review): if `gh` is missing or fails, ghView.stdout may be null or
  // empty and JSON.parse will throw — consider guarding.
  const ghView = spawnSync('gh', ['issue', 'view', issueNumber, '--json', 'title,body', '-q', '{title:.title,body:.body}'], { shell: true });
  const meta = JSON.parse(ghView.stdout.toString());
  // NOTE(review): branchName is computed but never used in this function —
  // confirm whether a `git checkout -b` step is missing.
  const branchName = `impl/${issueNumber}-${meta.title.toLowerCase().replace(/[^a-z0-9]/g, '-')}`.slice(0, 50);

  // 1. Initial Research & Test Creation
  console.log('\n🧠 Phase 1: Research & Reproduction...');
  spawnSync(geminiBin, [
    '--policy', policyPath, '--cwd', workDir,
    '-p', `Research Issue #${issueNumber}: "${meta.title}".
Description: ${meta.body}.
ACTION: Create a NEW Vitest test file in 'tests/repro_issue_${issueNumber}.test.ts' that demonstrates the issue or feature.
Ensure this test fails currently.`
  ], { stdio: 'inherit' });

  // 2. The Self-Healing Loop
  let attempts = 0;
  const maxAttempts = 5;
  let success = false;

  console.log('\n🛠️ Phase 2: Implementation Loop...');
  while (attempts < maxAttempts && !success) {
    attempts++;
    console.log(`\n👉 Attempt ${attempts}/${maxAttempts}...`);

    // Run the specific repro test (output captured, not inherited, so the
    // failure text can be fed back to the agent below).
    const testRun = spawnSync('npx', ['vitest', 'run', `tests/repro_issue_${issueNumber}.test.ts`], { cwd: workDir });

    if (testRun.status === 0) {
      console.log('✅ Reproduction test PASSED!');
      success = true;
      break;
    }

    console.log('❌ Test failed. Asking Gemini to fix the implementation...');
    const testError = testRun.stdout.toString() + testRun.stderr.toString();

    // Only the last 2000 characters of output are forwarded to keep the
    // prompt bounded.
    spawnSync(geminiBin, [
      '--policy', policyPath, '--cwd', workDir,
      '-p', `The reproduction test for Issue #${issueNumber} is still failing.
ERROR OUTPUT:
${testError.slice(-2000)}

ACTION: Modify the source code to fix this error and make the test pass.
Do not modify the test itself unless it has a syntax error.`
    ], { stdio: 'inherit' });
  }

  // 3. Final Verification — the repro test passing is necessary but not
  // sufficient; the whole suite must also be green before committing.
  if (success) {
    console.log('\n🧪 Phase 3: Final Verification...');
    const finalCheck = spawnSync('npm', ['test'], { cwd: workDir, stdio: 'inherit' });
    if (finalCheck.status === 0) {
      console.log('\n🎉 Implementation complete and verified!');
      spawnSync('git', ['add', '.'], { cwd: workDir });
      spawnSync('git', ['commit', '-m', `feat: implement issue #${issueNumber}`], { cwd: workDir });
      return 0;
    }
  }

  console.error('\n❌ Supervisor: Failed to reach a passing state within retry limit.');
  return 1;
}
|
||||
@@ -0,0 +1,17 @@
|
||||
import { TaskRunner } from '../TaskRunner.ts';
|
||||
import path from 'path';
|
||||
|
||||
export async function runReadyPlaybook(prNumber: string, targetDir: string, policyPath: string, geminiBin: string) {
|
||||
const runner = new TaskRunner(
|
||||
path.join(targetDir, `.gemini/logs/workspace-${prNumber}`),
|
||||
`🚀 Workspace | READY | PR #${prNumber}`
|
||||
);
|
||||
|
||||
runner.register([
|
||||
{ id: 'clean', name: 'Clean Workspace', cmd: `npm run clean && npm ci` },
|
||||
{ id: 'preflight', name: 'Full Preflight', cmd: `npm run preflight`, dep: 'clean' },
|
||||
{ id: 'conflicts', name: 'Main Conflict Check', cmd: `git fetch origin main && git merge-base --is-ancestor origin/main HEAD` }
|
||||
]);
|
||||
|
||||
return runner.run();
|
||||
}
|
||||
@@ -0,0 +1,17 @@
|
||||
import { TaskRunner } from '../TaskRunner.ts';
|
||||
import path from 'path';
|
||||
|
||||
export async function runReviewPlaybook(prNumber: string, targetDir: string, policyPath: string, geminiBin: string) {
|
||||
const runner = new TaskRunner(
|
||||
path.join(targetDir, `.gemini/logs/workspace-${prNumber}`),
|
||||
`🚀 Workspace | REVIEW | PR #${prNumber}`
|
||||
);
|
||||
|
||||
runner.register([
|
||||
{ id: 'build', name: 'Fast Build', cmd: `cd ${targetDir} && npm ci && npm run build` },
|
||||
{ id: 'ci', name: 'CI Checks', cmd: `gh pr checks ${prNumber}` },
|
||||
{ id: 'review', name: 'Workspaceed Review', cmd: `cd ${targetDir} && ${geminiBin} --policy ${policyPath} -p "Please activate the 'review-pr' skill and use it to conduct a behavioral review of PR #${prNumber}."` }
|
||||
]);
|
||||
|
||||
return runner.run();
|
||||
}
|
||||
@@ -0,0 +1,82 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2026 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
/**
 * WorkspaceProvider interface defines the contract for different remote
 * execution environments (GCE, Workstations, etc.).
 *
 * Numeric results follow process-exit-code conventions: 0 means success,
 * non-zero means failure.
 */
export interface WorkspaceProvider {
  /**
   * Provisions the underlying infrastructure.
   */
  provision(): Promise<number>;

  /**
   * Ensures the workspace is running and accessible.
   */
  ensureReady(): Promise<number>;

  /**
   * Performs the initial setup of the workspace (SSH, scripts, auth).
   */
  setup(options: SetupOptions): Promise<number>;

  /**
   * Returns the raw command string that would be used to execute a command.
   */
  getRunCommand(command: string, options?: ExecOptions): string;

  /**
   * Executes a command on the workspace.
   */
  exec(command: string, options?: ExecOptions): Promise<number>;

  /**
   * Executes a command on the workspace and returns the output.
   */
  getExecOutput(command: string, options?: ExecOptions): Promise<{ status: number; stdout: string; stderr: string }>;

  /**
   * Synchronizes local files to the workspace.
   */
  sync(localPath: string, remotePath: string, options?: SyncOptions): Promise<number>;

  /**
   * Returns the status of the workspace.
   */
  getStatus(): Promise<WorkspaceStatus>;

  /**
   * Stops the workspace to save costs.
   */
  stop(): Promise<number>;
}

/** Options for {@link WorkspaceProvider.setup}. */
export interface SetupOptions {
  /** GCP project that hosts the workspace. */
  projectId: string;
  /** Compute zone of the workspace (e.g. "us-west1-a"). */
  zone: string;
  /** Optional DNS suffix for the internal hostname; implementations may default it. */
  dnsSuffix?: string;
  // NOTE(review): syncAuth is not consumed by the visible provider
  // implementation — confirm intended behavior.
  syncAuth?: boolean;
}

/** Options for {@link WorkspaceProvider.exec} / getRunCommand. */
export interface ExecOptions {
  /** Allocate a TTY / inherit stdio for interactive use. */
  interactive?: boolean;
  /** Working directory for the command (inside the container when wrapped). */
  cwd?: string;
  /** If set, run the command inside this docker container on the workspace. */
  wrapContainer?: string;
}

/** Options for {@link WorkspaceProvider.sync} (rsync-style semantics). */
export interface SyncOptions {
  /** Delete remote files not present locally (rsync --delete). */
  delete?: boolean;
  /** Patterns to exclude from the transfer. */
  exclude?: string[];
  /** Run the remote rsync via sudo to bypass permission errors. */
  sudo?: boolean;
}

/** Snapshot of a workspace's state as reported by the provider. */
export interface WorkspaceStatus {
  /** Instance name. */
  name: string;
  /** Provider-reported lifecycle state (e.g. "RUNNING"). */
  status: string;
  /** Internal (VPC) IP address, when known. */
  internalIp?: string;
  /** External IP address, when known. */
  externalIp?: string;
}
|
||||
@@ -0,0 +1,83 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2026 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { spawnSync } from 'child_process';
|
||||
import os from 'os';
|
||||
|
||||
/**
 * Centralized SSH/RSYNC management for GCE Workers.
 * Handles Magic Hostname routing with Zero-Knowledge security.
 * STRICTLY uses Direct Internal connection (Corporate Magic).
 *
 * All commands are executed by composing a single shell string and running it
 * with `spawnSync(..., { shell: true })`; remote commands are single-quoted
 * via {@link GceConnectionManager.quote} to survive the outer shell.
 */
export class GceConnectionManager {
  // GCP project hosting the instance.
  private projectId: string;
  // Compute zone of the instance (e.g. "us-west1-a").
  private zone: string;
  // GCE instance name.
  private instanceName: string;

  constructor(projectId: string, zone: string, instanceName: string) {
    this.projectId = projectId;
    this.zone = zone;
    this.instanceName = instanceName;
  }

  /**
   * Builds the "user@host" target using the internal magic hostname scheme:
   * nic0.<instance>.<zone>.c.<project>.internal.gcpnode.com, with the local
   * $USER mapped to the OS Login form "<user>_google_com".
   */
  getMagicRemote(): string {
    const user = `${process.env.USER || 'node'}_google_com`;
    const dnsSuffix = '.internal.gcpnode.com';
    return `${user}@nic0.${this.instanceName}.${this.zone}.c.${this.projectId}${dnsSuffix}`;
  }

  /**
   * Common SSH flags: disable host-key checking (ephemeral workers), keep the
   * connection alive, and use the gcloud-managed compute key.
   */
  getCommonArgs(): string[] {
    return [
      '-o', 'StrictHostKeyChecking=no',
      '-o', 'UserKnownHostsFile=/dev/null',
      '-o', 'LogLevel=ERROR',
      '-o', 'ConnectTimeout=60',
      '-o', 'ServerAliveInterval=30',
      '-o', 'ServerAliveCountMax=3',
      '-o', 'SendEnv=USER',
      '-i', `${os.homedir()}/.ssh/google_compute_engine`
    ];
  }

  /**
   * Returns the full ssh command line (as a single shell string) that would
   * run `command` on the worker. `-t` is added for interactive sessions.
   */
  getRunCommand(command: string, options: { interactive?: boolean } = {}): string {
    const fullRemote = this.getMagicRemote();
    return `ssh ${this.getCommonArgs().join(' ')} ${options.interactive ? '-t' : ''} ${fullRemote} ${this.quote(command)}`;
  }

  /**
   * Executes `command` on the worker synchronously.
   * Returns exit status (1 if ssh could not be spawned) plus captured
   * stdout/stderr — both empty strings when stdio is 'inherit'.
   */
  run(command: string, options: { interactive?: boolean; stdio?: 'pipe' | 'inherit' } = {}): { status: number; stdout: string; stderr: string } {
    const sshCmd = this.getRunCommand(command, options);
    const res = spawnSync(sshCmd, { stdio: options.stdio || 'pipe', shell: true });
    return {
      status: res.status ?? 1,
      stdout: res.stdout?.toString() || '',
      stderr: res.stderr?.toString() || ''
    };
  }

  /**
   * Rsyncs `localPath` to `remotePath` on the worker over the same SSH
   * transport. Returns the rsync exit status (1 if it could not be spawned).
   */
  sync(localPath: string, remotePath: string, options: { delete?: boolean; exclude?: string[]; sudo?: boolean } = {}): number {
    const fullRemote = this.getMagicRemote();
    // We use --no-t and --no-perms to avoid "Operation not permitted" errors
    // when syncing to volumes that might have UID mismatches with the container.
    const rsyncArgs = ['-rvz', '--quiet', '--no-t', '--no-perms', '--no-owner', '--no-group'];
    if (options.delete) rsyncArgs.push('--delete');
    if (options.exclude) options.exclude.forEach(ex => rsyncArgs.push(`--exclude="${ex}"`));

    // Use sudo on the remote side if requested to bypass permission errors
    if (options.sudo) {
      rsyncArgs.push('--rsync-path="sudo rsync"');
    }

    // The ssh transport (with all common flags) is passed via rsync -e.
    const sshCmd = `ssh ${this.getCommonArgs().join(' ')}`;
    const directRsync = `rsync ${rsyncArgs.join(' ')} -e ${this.quote(sshCmd)} ${localPath} ${fullRemote}:${remotePath}`;

    const res = spawnSync(directRsync, { stdio: 'inherit', shell: true });
    return res.status ?? 1;
  }

  // Single-quote a string for the shell, escaping embedded single quotes
  // with the standard '\'' dance.
  private quote(str: string) {
    return `'${str.replace(/'/g, "'\\''")}'`;
  }
}
|
||||
@@ -0,0 +1,304 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2026 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { spawnSync } from 'child_process';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import os from 'os';
|
||||
import { WorkspaceProvider, SetupOptions, ExecOptions, SyncOptions, WorkspaceStatus } from './BaseProvider.ts';
|
||||
import { GceConnectionManager } from './GceConnectionManager.ts';
|
||||
|
||||
/**
 * WorkspaceProvider implementation backed by a GCE VM running
 * Container-Optimized OS (COS) hosting a long-lived "maintainer-worker"
 * docker container.
 *
 * Infrastructure operations shell out to `gcloud`; remote commands go
 * through GceConnectionManager (direct internal SSH).
 */
export class GceCosProvider implements WorkspaceProvider {
  private projectId: string;
  private zone: string;
  private instanceName: string;
  // Path of the generated per-project SSH config (written by setup()).
  private sshConfigPath: string;
  // NOTE(review): knownHostsPath is assigned but never read in this class —
  // confirm whether it should be wired into the SSH config.
  private knownHostsPath: string;
  private sshAlias = 'gcli-worker';
  private conn: GceConnectionManager;

  constructor(projectId: string, zone: string, instanceName: string, repoRoot: string) {
    this.projectId = projectId;
    this.zone = zone;
    this.instanceName = instanceName;
    const workspacesDir = path.join(repoRoot, '.gemini/workspaces');
    if (!fs.existsSync(workspacesDir)) fs.mkdirSync(workspacesDir, { recursive: true });
    this.sshConfigPath = path.join(workspacesDir, 'ssh_config');
    this.knownHostsPath = path.join(workspacesDir, 'known_hosts');
    this.conn = new GceConnectionManager(projectId, zone, instanceName);
  }

  /**
   * Creates the VPC/subnet/firewall (idempotently) and the COS instance with
   * a startup script that mounts a persistent data disk and launches the
   * maintainer container. Returns the gcloud exit status (0 = success).
   */
  async provision(): Promise<number> {
    const imageUri = 'us-docker.pkg.dev/gemini-code-dev/gemini-cli/maintainer:latest';
    // Zone "us-west1-a" -> region "us-west1".
    const region = this.zone.split('-').slice(0, 2).join('-');
    const vpcName = 'iap-vpc';
    const subnetName = 'iap-subnet';

    console.log(`🏗️ Ensuring "Magic" Network Infrastructure in ${this.projectId}...`);

    // Each describe/create pair makes the network setup idempotent.
    const vpcCheck = spawnSync('gcloud', ['compute', 'networks', 'describe', vpcName, '--project', this.projectId], { stdio: 'pipe' });
    if (vpcCheck.status !== 0) {
      spawnSync('gcloud', ['compute', 'networks', 'create', vpcName, '--project', this.projectId, '--subnet-mode=custom'], { stdio: 'inherit' });
    }

    const subnetCheck = spawnSync('gcloud', ['compute', 'networks', 'subnets', 'describe', subnetName, '--project', this.projectId, '--region', region], { stdio: 'pipe' });
    if (subnetCheck.status !== 0) {
      spawnSync('gcloud', ['compute', 'networks', 'subnets', 'create', subnetName,
        '--project', this.projectId, '--network', vpcName, '--region', region,
        '--range=10.0.0.0/24', '--enable-private-ip-google-access'], { stdio: 'inherit' });
    } else {
      // Subnet exists: make sure private Google access stays enabled.
      spawnSync('gcloud', ['compute', 'networks', 'subnets', 'update', subnetName, '--project', this.projectId, '--region', region, '--enable-private-ip-google-access'], { stdio: 'pipe' });
    }

    // NOTE(review): source-ranges=0.0.0.0/0 opens SSH to the world on this
    // VPC (the instance itself has no external IP, but confirm this range is
    // intentional).
    const fwCheck = spawnSync('gcloud', ['compute', 'firewall-rules', 'describe', 'allow-corporate-ssh', '--project', this.projectId], { stdio: 'pipe' });
    if (fwCheck.status !== 0) {
      spawnSync('gcloud', ['compute', 'firewall-rules', 'create', 'allow-corporate-ssh',
        '--project', this.projectId, '--network', vpcName, '--allow=tcp:22', '--source-ranges=0.0.0.0/0'], { stdio: 'inherit' });
    }

    console.log(`🚀 Provisioning GCE COS worker: ${this.instanceName} (Unified Workspace Setup)...`);

    // Startup script run by COS on boot: mount/format the data disk, create
    // the unified /home/node/.workspaces symlinks, and (re)start the
    // maintainer container. Interpolations: ${imageUri} is expanded here;
    // shell $VARs are escaped or literal in the remote script.
    const startupScriptContent = `#!/bin/bash
set -e
echo "🚀 Initializing Unified Workspace..."

# 1. Mount Data Disk
mkdir -p /mnt/disks/data
if ! mountpoint -q /mnt/disks/data; then
DATA_DISK="/dev/disk/by-id/google-data"
[ -e "$DATA_DISK" ] || DATA_DISK="/dev/sdb"

while [ ! -e "$DATA_DISK" ]; do echo "Waiting for data disk..."; sleep 1; done
blkid "$DATA_DISK" || mkfs.ext4 -m 0 -F "$DATA_DISK"
mount -o discard,defaults "$DATA_DISK" /mnt/disks/data
fi

# 2. Prepare Stateful Directories (on the persistent disk)
mkdir -p /mnt/disks/data/main /mnt/disks/data/worktrees /mnt/disks/data/scripts /mnt/disks/data/config /mnt/disks/data/policies
chmod -R 777 /mnt/disks/data

# 3. Handle Unified Path Symlink (/home/node/.workspaces)
# This ensures absolute paths match perfectly between host and container.
mkdir -p /home/node
ln -sfn /mnt/disks/data /home/node/.workspaces
chown -R 1000:1000 /home/node

# Also ensure host users can find it
ln -sfn /mnt/disks/data /workspaces
chmod 777 /workspaces
for h in /home/*_google_com; do
[ -d "$h" ] || continue
ln -sfn /mnt/disks/data "$h/.workspaces"
chown -h $(basename $h):$(basename $h) "$h/.workspaces"
done

# 4. Container Resilience Loop
until docker info >/dev/null 2>&1; do echo "Waiting for docker..."; sleep 2; done

for i in {1..5}; do
docker pull ${imageUri} && break || (echo "Pull failed, retry $i..." && sleep 5)
done

if ! docker ps -a | grep -q "maintainer-worker"; then
docker run -d --name maintainer-worker --restart always \\
-v /mnt/disks/data:/home/node/.workspaces:rw \\
-v /mnt/disks/data/gemini-cli-config/.gemini:/home/node/.gemini:rw \\
-v ~/.config/gh:/home/node/.config/gh:rw \\
${imageUri} /bin/bash -c "while true; do sleep 1000; done"
fi
echo "✅ Unified Workspace is active."
`;

    // gcloud reads the startup script from a file, so write it to a temp
    // path and delete it after the create call.
    const tmpScriptPath = path.join(os.tmpdir(), `gcli-startup-${Date.now()}.sh`);
    fs.writeFileSync(tmpScriptPath, startupScriptContent);

    const result = spawnSync('gcloud', [
      'compute', 'instances', 'create', this.instanceName,
      '--project', this.projectId,
      '--zone', this.zone,
      '--machine-type', 'n2-standard-8',
      '--image-family', 'cos-stable',
      '--image-project', 'cos-cloud',
      '--boot-disk-size', '10GB',
      '--boot-disk-type', 'pd-balanced',
      '--create-disk', `name=${this.instanceName}-data,size=200,type=pd-balanced,device-name=data,auto-delete=yes`,
      '--metadata-from-file', `startup-script=${tmpScriptPath}`,
      '--metadata', 'enable-oslogin=TRUE',
      '--network-interface', `network=${vpcName},subnet=${subnetName},no-address`,
      '--scopes', 'https://www.googleapis.com/auth/cloud-platform',
      '--quiet'
    ], { stdio: 'inherit' });

    fs.unlinkSync(tmpScriptPath);


    if (result.status === 0) {
      console.log('⏳ Waiting for OS Login and SSH to initialize (this takes ~45s)...');
      await new Promise(r => setTimeout(r, 45000));
    }

    return result.status ?? 1;
  }

  /**
   * Starts the instance if it is not RUNNING, then verifies the maintainer
   * container exists, has correct mounts, and has an up-to-date image —
   * refreshing it over SSH if not. Returns 0 when healthy, 1 on failure.
   */
  async ensureReady(): Promise<number> {
    const status = await this.getStatus();
    if (status.status !== 'RUNNING') {
      console.log(`⚠️ Worker ${this.instanceName} is ${status.status}. Waking it up...`);
      const res = spawnSync('gcloud', [
        'compute', 'instances', 'start', this.instanceName,
        '--project', this.projectId,
        '--zone', this.zone
      ], { stdio: 'inherit' });
      if (res.status !== 0) return res.status ?? 1;

      console.log('⏳ Waiting for boot...');
      await new Promise(r => setTimeout(r, 20000));
    }

    // NEW: Verify the container is actually running AND up to date
    console.log(' - Verifying remote container health and image version...');
    const containerCheck = await this.getExecOutput('sudo docker ps -q --filter "name=maintainer-worker"');

    let needsUpdate = false;
    if (containerCheck.status === 0 && containerCheck.stdout.trim()) {
      // Check if the volume mounts are correct by checking for files inside .workspaces/main
      const mountCheck = await this.getExecOutput('sudo docker exec maintainer-worker ls -A /home/node/.workspaces/main');
      if (mountCheck.status !== 0 || !mountCheck.stdout.trim()) {
        console.log(' ⚠️ Remote container has incorrect or empty mounts. Triggering refresh...');
        needsUpdate = true;
      } else {
        // Check if the running image is stale
        // NOTE(review): imageUri here is declared but unused in this branch;
        // staleness is detected by probing for tmux instead.
        const imageUri = 'us-docker.pkg.dev/gemini-code-dev/gemini-cli/maintainer:latest';
        const tmuxCheck = await this.getExecOutput('sudo docker exec maintainer-worker which tmux');
        if (tmuxCheck.status !== 0) {
          console.log(' ⚠️ Remote container is stale (missing tmux). Triggering update...');
          needsUpdate = true;
        }
      }
    } else {
      // No container id came back (or the probe itself failed).
      needsUpdate = true;
    }

    if (needsUpdate) {
      console.log(' ⚠️ Container missing or stale. Attempting refresh...');
      const imageUri = 'us-docker.pkg.dev/gemini-code-dev/gemini-cli/maintainer:latest';
      // Ensure data mount is available before running
      const recoverCmd = `
(mountpoint -q /mnt/disks/data || sudo mount /dev/disk/by-id/google-data /mnt/disks/data) && \
sudo docker pull ${imageUri} && \
(sudo docker rm -f maintainer-worker || true) && \
sudo docker run -d --name maintainer-worker --restart always \
-v /mnt/disks/data:/home/node/.workspaces:rw \
-v /mnt/disks/data/gemini-cli-config/.gemini:/home/node/.gemini:rw \
-v ~/.config/gh:/home/node/.config/gh:rw \
${imageUri} /bin/bash -c "while true; do sleep 1000; done"
`;
      const recoverRes = await this.exec(recoverCmd);
      if (recoverRes !== 0) {
        console.error(' ❌ Critical: Failed to refresh maintainer container.');
        return 1;
      }
      console.log(' ✅ Container refreshed.');
    }

    return 0;
  }

  /**
   * Writes a project-local SSH config entry for the worker and verifies the
   * direct internal connection works. Returns 0 on success, 1 on failure.
   */
  async setup(options: SetupOptions): Promise<number> {
    const dnsSuffix = options.dnsSuffix || '.internal.gcpnode.com';
    const internalHostname = `nic0.${this.instanceName}.${this.zone}.c.${this.projectId}${dnsSuffix.startsWith('.') ? dnsSuffix : '.' + dnsSuffix}`;
    // OS Login maps "user" to "user_google_com".
    const user = `${process.env.USER || 'node'}_google_com`;

    const sshEntry = `
Host ${this.sshAlias}
HostName ${internalHostname}
IdentityFile ~/.ssh/google_compute_engine
User ${user}
UserKnownHostsFile /dev/null
CheckHostIP no
StrictHostKeyChecking no
ConnectTimeout 60
ServerAliveInterval 30
`;

    fs.writeFileSync(this.sshConfigPath, sshEntry);
    console.log(` ✅ Created project SSH config: ${this.sshConfigPath}`);

    console.log(' - Verifying direct connection (may trigger corporate SSO prompt)...');
    const res = this.conn.run('echo 1');
    if (res.status !== 0) {
      console.error('\n❌ All connection attempts failed. Please ensure you have "gcert" and IAP permissions.');
      return 1;
    }
    console.log(' ✅ Connection verified. Waiting 10s for remote disk initialization...');
    await new Promise(r => setTimeout(r, 10000));
    return 0;
  }

  /**
   * Builds the ssh command line for `command`, optionally wrapped in
   * `docker exec` on the remote side when options.wrapContainer is set.
   */
  getRunCommand(command: string, options: ExecOptions = {}): string {
    let finalCmd = command;
    if (options.wrapContainer) {
      finalCmd = `sudo docker exec ${options.interactive ? '-it' : ''} ${options.cwd ? `-w ${options.cwd}` : ''} ${options.wrapContainer} sh -c ${this.quote(command)}`;
    }
    return this.conn.getRunCommand(finalCmd, { interactive: options.interactive });
  }

  /**
   * Executes a command remotely and returns only its exit status.
   */
  async exec(command: string, options: ExecOptions = {}): Promise<number> {
    const res = await this.getExecOutput(command, options);
    return res.status;
  }

  /**
   * Executes a command remotely (optionally inside the container) and
   * returns status plus captured stdout/stderr. Interactive runs inherit
   * stdio, so their stdout/stderr come back empty.
   */
  async getExecOutput(command: string, options: ExecOptions = {}): Promise<{ status: number; stdout: string; stderr: string }> {
    let finalCmd = command;
    if (options.wrapContainer) {
      finalCmd = `sudo docker exec ${options.interactive ? '-it' : ''} ${options.cwd ? `-w ${options.cwd}` : ''} ${options.wrapContainer} sh -c ${this.quote(command)}`;
    }

    return this.conn.run(finalCmd, { interactive: options.interactive, stdio: options.interactive ? 'inherit' : 'pipe' });
  }

  /**
   * Rsyncs localPath to remotePath on the worker via the connection manager.
   */
  async sync(localPath: string, remotePath: string, options: SyncOptions = {}): Promise<number> {
    console.log(`📦 Syncing ${localPath} to remote:${remotePath}...`);
    return this.conn.sync(localPath, remotePath, options);
  }

  /**
   * Queries gcloud for the instance's name/status/internal IP.
   * Returns status 'UNKNOWN' when the describe call fails and 'ERROR' when
   * its output cannot be parsed.
   */
  async getStatus(): Promise<WorkspaceStatus> {
    const res = spawnSync('gcloud', [
      'compute', 'instances', 'describe', this.instanceName,
      '--project', this.projectId,
      '--zone', this.zone,
      '--format', 'json(name,status,networkInterfaces[0].networkIP)'
    ], { stdio: 'pipe' });

    if (res.status !== 0) {
      return { name: this.instanceName, status: 'UNKNOWN' };
    }

    try {
      const data = JSON.parse(res.stdout.toString());
      return {
        name: data.name,
        status: data.status,
        internalIp: data.networkInterfaces?.[0]?.networkIP
      };
    } catch (e) {
      return { name: this.instanceName, status: 'ERROR' };
    }
  }

  /**
   * Stops (but does not delete) the instance to save costs.
   */
  async stop(): Promise<number> {
    const res = spawnSync('gcloud', [
      'compute', 'instances', 'stop', this.instanceName,
      '--project', this.projectId,
      '--zone', this.zone
    ], { stdio: 'inherit' });
    return res.status ?? 1;
  }

  // Single-quote a string for the shell, escaping embedded single quotes.
  private quote(str: string) {
    return `'${str.replace(/'/g, "'\\''")}'`;
  }
}
|
||||
@@ -0,0 +1,20 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2026 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { GceCosProvider } from './GceCosProvider.ts';
|
||||
import { WorkspaceProvider } from './BaseProvider.ts';
|
||||
import path from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
||||
const REPO_ROOT = path.resolve(__dirname, '../../../../..');
|
||||
|
||||
export class ProviderFactory {
|
||||
static getProvider(config: { projectId: string; zone: string; instanceName: string }): WorkspaceProvider {
|
||||
// Currently we only have GceCosProvider, but this is where we'd branch
|
||||
return new GceCosProvider(config.projectId, config.zone, config.instanceName, REPO_ROOT);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,59 @@
|
||||
#!/bin/bash
# Provisions a Gemini CLI maintainer worker VM: system packages, GitHub CLI,
# Node.js 20, global tooling, and a pre-warmed clone+build of the repository.
# NOTE(review): the script invokes apt-get/chown directly (assumes root, e.g.
# as a startup script) yet uses `sudo fuser` in wait_for_apt — confirm the
# intended execution user.
set -e

# Ensure we have a valid environment for non-interactive startup
export USER=${USER:-ubuntu}
export HOME=/home/$USER
export DEBIAN_FRONTEND=noninteractive

echo "🛠️ Provisioning High-Performance Gemini CLI Maintainer Worker..."

# Wait for apt lock (another apt/dpkg process may hold it right after boot)
wait_for_apt() {
  while sudo fuser /var/lib/dpkg/lock-frontend /var/lib/apt/lists/lock >/dev/null 2>&1 ; do
    sleep 2
  done
}

wait_for_apt

# 1. System Essentials (Inc. libraries for native node modules)
apt-get update && apt-get install -y \
  curl git git-lfs tmux build-essential unzip jq gnupg cron \
  libsecret-1-dev libkrb5-dev

# 2. GitHub CLI (installed from GitHub's apt repository; idempotent)
if ! command -v gh &> /dev/null; then
  curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
  chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg
  echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | tee /etc/apt/sources.list.d/github-cli.list > /dev/null
  wait_for_apt
  apt-get update && apt-get install gh -y
fi

# 3. Direct Node.js 20 Installation (NodeSource)
if ! command -v node &> /dev/null; then
  echo "Installing Node.js 20..."
  curl -fsSL https://deb.nodesource.com/setup_20.x | bash -
  wait_for_apt
  apt-get install -y nodejs
fi

# 4. Global Maintenance Tooling
echo "Installing global developer tools..."
npm install -g tsx vitest @google/gemini-cli@nightly

# 5. Pre-warm Repository (Main Hub)
# We clone and build the main repo in the image so that new worktrees start with a warm cache
REMOTE_WORK_DIR="$HOME/dev/main"
mkdir -p "$HOME/dev"
if [ ! -d "$REMOTE_WORK_DIR" ]; then
  echo "Pre-cloning and building repository..."
  git clone --filter=blob:none https://github.com/google-gemini/gemini-cli.git "$REMOTE_WORK_DIR"
  cd "$REMOTE_WORK_DIR"
  npm install --no-audit --no-fund
  npm run build
fi

# Hand the pre-warmed tree to the (possibly non-root) working user.
chown -R $USER:$USER $HOME/dev
echo "✅ Provisioning Complete!"
|
||||
@@ -0,0 +1,383 @@
|
||||
import { spawnSync } from 'child_process';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import os from 'os';
|
||||
import { fileURLToPath } from 'url';
|
||||
import readline from 'readline';
|
||||
import { ProviderFactory } from './providers/ProviderFactory.ts';
|
||||
|
||||
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
||||
const REPO_ROOT = path.resolve(__dirname, '../../../..');
|
||||
|
||||
/**
|
||||
* Loads and parses a local .env file from the repository root and the home directory.
|
||||
*/
|
||||
function loadDotEnv() {
|
||||
const envPaths = [
|
||||
path.join(REPO_ROOT, '.env'),
|
||||
path.join(os.homedir(), '.env')
|
||||
];
|
||||
|
||||
envPaths.forEach(envPath => {
|
||||
if (fs.existsSync(envPath)) {
|
||||
const content = fs.readFileSync(envPath, 'utf8');
|
||||
content.split('\n').forEach(line => {
|
||||
const trimmed = line.trim();
|
||||
if (!trimmed || trimmed.startsWith('#')) return;
|
||||
|
||||
const match = trimmed.match(/^([^=]+)=(.*)$/);
|
||||
if (match) {
|
||||
const key = match[1].trim();
|
||||
const val = match[2].trim().replace(/^["'](.*)["']$/, '$1');
|
||||
if (!process.env[key]) process.env[key] = val;
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async function prompt(question: string, defaultValue: string, explanation?: string, sensitive: boolean = false): Promise<string> {
|
||||
const autoAccept = process.argv.includes('--yes') || process.argv.includes('-y');
|
||||
if (autoAccept && defaultValue) return defaultValue;
|
||||
|
||||
if (explanation) {
|
||||
console.log(`\n📖 ${explanation}`);
|
||||
}
|
||||
|
||||
const displayDefault = sensitive && defaultValue ? `${defaultValue.substring(0, 4)}...${defaultValue.substring(defaultValue.length - 4)}` : defaultValue;
|
||||
const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
|
||||
|
||||
const promptMsg = defaultValue
|
||||
? `❓ ${question} [Detected: ${displayDefault}] (Press <Enter> to keep, or type new value): `
|
||||
: `❓ ${question} (<Enter> for none): `;
|
||||
|
||||
return new Promise((resolve) => {
|
||||
rl.question(promptMsg, (answer) => {
|
||||
rl.close();
|
||||
resolve(answer.trim() || defaultValue);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async function confirm(question: string): Promise<boolean> {
|
||||
const autoAccept = process.argv.includes('--yes') || process.argv.includes('-y');
|
||||
if (autoAccept) return true;
|
||||
|
||||
const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
|
||||
return new Promise((resolve) => {
|
||||
rl.question(`❓ ${question} (y/n): `, (answer) => {
|
||||
rl.close();
|
||||
resolve(answer.trim().toLowerCase() === 'y');
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async function createFork(upstream: string): Promise<string> {
|
||||
console.log(` - Creating fork for ${upstream}...`);
|
||||
const forkRes = spawnSync('gh', ['repo', 'fork', upstream, '--clone=false'], { stdio: 'inherit' });
|
||||
if (forkRes.status === 0) {
|
||||
const userRes = spawnSync('gh', ['api', 'user', '-q', '.login'], { stdio: 'pipe' });
|
||||
const user = userRes.stdout.toString().trim();
|
||||
return `${user}/${upstream.split('/')[1]}`;
|
||||
}
|
||||
return upstream;
|
||||
}
|
||||
|
||||
/**
 * Interactive end-to-end setup for a Gemini Workspaces remote worker.
 *
 * Walks through three phases:
 *  1. CONFIGURATION — prompts for GCP project/zone/terminal target, discovers
 *     the upstream repo and the user's fork via the `gh` CLI, and collects
 *     GitHub / Gemini credentials. Skipped if a saved config exists and the
 *     user opts to reuse it.
 *  2. INFRASTRUCTURE — locates (or provisions) the per-user worker VM through
 *     the provider abstraction and wakes it if stopped.
 *  3. REMOTE INITIALIZATION — creates the remote directory layout, syncs
 *     scripts/policies/settings/credentials, and clones the chosen fork.
 *
 * @param env Environment map (injectable for testing); defaults to process.env.
 * @returns Exit code: 0 on success, non-zero on any failed/declined step.
 */
export async function runSetup(env: NodeJS.ProcessEnv = process.env) {
  // Load .env overrides before reading any env-derived defaults below.
  loadDotEnv();

  console.log(`
================================================================================
🚀 GEMINI WORKSPACES: HIGH-PERFORMANCE REMOTE DEVELOPMENT
================================================================================
Workspaces allow you to delegate heavy tasks (PR reviews, agentic fixes,
and full builds) to a dedicated, high-performance GCP worker.
================================================================================
`);

  console.log('📝 PHASE 1: CONFIGURATION');
  console.log('--------------------------------------------------------------------------------');

  // Previously saved configuration (if any); `skipConfig` lets a returning user
  // jump straight to the infrastructure/execution phases.
  const settingsPath = path.join(REPO_ROOT, '.gemini/workspaces/settings.json');
  let settings: any = {};
  let skipConfig = false;

  if (fs.existsSync(settingsPath)) {
    try {
      settings = JSON.parse(fs.readFileSync(settingsPath, 'utf8'));
      // `--reconfigure` forces the full interactive flow even with saved config.
      if (settings.workspace && !process.argv.includes('--reconfigure')) {
        console.log(' ✅ Existing configuration found.');
        const shouldSkip = await confirm('Use existing configuration and skip to execution?');
        if (shouldSkip) {
          skipConfig = true;
        }
      }
    } catch (e) {} // Best-effort: a corrupt settings file just means "no saved config".
  }

  // 1. Project Identity
  // Saved values (when present) become the defaults offered at each prompt.
  let projectId = settings.workspace?.projectId || '';
  let zone = settings.workspace?.zone || 'us-west1-a';
  let terminalTarget = settings.workspace?.terminalTarget || 'tab';
  let upstreamRepo = settings.workspace?.upstreamRepo || 'google-gemini/gemini-cli';
  let userFork = settings.workspace?.userFork || upstreamRepo;

  if (!skipConfig) {
    // Env vars take precedence over previously saved values as prompt defaults.
    const defaultProject = env.GOOGLE_CLOUD_PROJECT || env.WORKSPACE_PROJECT || projectId || '';
    projectId = await prompt('GCP Project ID', defaultProject,
    'The GCP Project where your workspace worker will live. Your personal project is recommended.');

    if (!projectId) {
      console.error('❌ Project ID is required. Set GOOGLE_CLOUD_PROJECT or enter it manually.');
      return 1;
    }

    zone = await prompt('Compute Zone', env.WORKSPACE_ZONE || zone,
    'The physical location of your worker. us-west1-a is the team default.');

    terminalTarget = await prompt('Terminal UI Target (foreground, background, tab, window)', env.WORKSPACE_TERM_TARGET || terminalTarget,
    'When you start a job in gemini-cli, should it run as a foreground shell, background shell (no attach), new iterm2 tab, or new iterm2 window?');

    // 2. Repository Discovery (Dynamic)
    // Ask the gh CLI about the repo we're currently in; if it's a fork, treat
    // its parent as upstream.
    console.log('\n🔍 Detecting repository origins...');

    const repoInfoRes = spawnSync('gh', ['repo', 'view', '--json', 'nameWithOwner,parent,isFork'], { stdio: 'pipe' });

    if (repoInfoRes.status === 0) {
      try {
        const repoInfo = JSON.parse(repoInfoRes.stdout.toString());
        upstreamRepo = repoInfo.isFork && repoInfo.parent ? repoInfo.parent.nameWithOwner : repoInfo.nameWithOwner;

        console.log(` - Upstream identified: ${upstreamRepo}`);
        console.log(` - Searching for your forks of ${upstreamRepo}...`);

        // NOTE(review): upstreamOwner/upstreamName are computed but never used
        // below — candidates for removal.
        const upstreamOwner = upstreamRepo.split('/')[0];
        const upstreamName = upstreamRepo.split('/')[1];

        // List the viewer's own forks, then filter (via --jq) to forks whose
        // parent is exactly `upstreamRepo`.
        const gqlQuery = `query { viewer { repositories(first: 100, isFork: true, affiliations: OWNER) { nodes { nameWithOwner parent { nameWithOwner } } } } }`;
        const forksRes = spawnSync('gh', ['api', 'graphql', '-f', `query=${gqlQuery}`, '--jq', `.data.viewer.repositories.nodes[] | select(.parent.nameWithOwner == "${upstreamRepo}") | .nameWithOwner`], { stdio: 'pipe' });
        const myForks = forksRes.stdout.toString().trim().split('\n').filter(Boolean);

        if (myForks.length > 0) {
          // Offer existing forks plus create-new / use-upstream escape hatches.
          console.log('\n🍴 Found existing forks:');
          myForks.forEach((name: string, i: number) => console.log(` [${i + 1}] ${name}`));
          console.log(` [c] Create a new fork`);
          console.log(` [u] Use upstream directly (not recommended)`);

          const choice = await prompt('Select an option', '1');
          if (choice.toLowerCase() === 'c') {
            userFork = await createFork(upstreamRepo);
          } else if (choice.toLowerCase() === 'u') {
            userFork = upstreamRepo;
          } else {
            // Numeric selection; out-of-range/non-numeric input falls back to
            // the first fork. NOTE(review): parseInt without an explicit radix.
            const idx = parseInt(choice) - 1;
            userFork = myForks[idx] || myForks[0];
          }
        } else {
          const shouldFork = await confirm('No fork detected. Create a personal fork for sandboxed implementations?');
          userFork = shouldFork ? await createFork(upstreamRepo) : upstreamRepo;
        }
      } catch (e) {
        // Any discovery failure degrades to working against upstream directly.
        userFork = upstreamRepo;
      }
    }

    console.log(` ✅ Upstream: ${upstreamRepo}`);
    console.log(` ✅ Workspace: ${userFork}`);
  }

  // 3. Security & Auth (Always check for token if init is needed)
  let githubToken = env.WORKSPACE_GH_TOKEN || '';
  if (!skipConfig) {
    if (!githubToken) {
      const hasToken = await confirm('\nDo you already have a GitHub Personal Access Token (PAT) with "Read/Write" access to contents & PRs?');
      if (hasToken) {
        githubToken = await prompt('Paste Scoped Token', '');
      } else {
        const shouldGenToken = await confirm('Would you like to generate a new scoped token now? (Highly Recommended)');
        if (shouldGenToken) {
          // Build a pre-filled GitHub "new fine-grained PAT" URL scoped to the
          // upstream repo (and the fork, when it differs).
          const baseUrl = 'https://github.com/settings/personal-access-tokens/new';
          const name = `Workspace-${env.USER}`;
          const repoParams = userFork !== upstreamRepo
          ? `&repositories[]=${encodeURIComponent(upstreamRepo)}&repositories[]=${encodeURIComponent(userFork)}`
          : `&repositories[]=${encodeURIComponent(upstreamRepo)}`;

          const magicLink = `${baseUrl}?name=${encodeURIComponent(name)}&description=Gemini+Workspaces+Worker${repoParams}&contents=write&pull_requests=write&metadata=read`;
          // OSC-8 escape sequence renders the URL as a clickable terminal link.
          const terminalLink = `\u001b]8;;${magicLink}\u0007${magicLink}\u001b]8;;\u0007`;

          console.log(`\n🔐 ACTION REQUIRED: Create a token with the required permissions:`);
          console.log(`\n${terminalLink}\n`);

          githubToken = await prompt('Paste Scoped Token', '');
        }
      }
    } else {
      // Token came from the environment; confirm/replace it interactively.
      githubToken = await prompt('GitHub Token', githubToken, 'A GitHub PAT is required for remote repository access and PR operations.', true);
    }
  }

  // 4. Gemini API Auth Strategy
  // Mirror the user's local gemini-cli auth choice onto the remote worker.
  console.log('\n🔐 Detecting Gemini Authentication strategy...');
  const localSettingsPath = path.join(env.HOME || '', '.gemini/settings.json');
  let authStrategy = 'google_accounts';
  let geminiApiKey = env.WORKSPACE_GEMINI_API_KEY || env.GEMINI_API_KEY || '';

  if (fs.existsSync(localSettingsPath)) {
    try {
      const localSettings = JSON.parse(fs.readFileSync(localSettingsPath, 'utf8'));
      authStrategy = localSettings.security?.auth?.selectedType || 'google_accounts';
      // Env-provided key wins; settings file is only a fallback.
      if (!geminiApiKey && localSettings.security?.auth?.apiKey) {
        geminiApiKey = localSettings.security.auth.apiKey;
      }
      console.log(` - Local Auth Method: ${authStrategy}`);
    } catch (e) {} // Best-effort: unreadable local settings keep the defaults.
  }

  if (authStrategy === 'gemini-api-key') {
    if (geminiApiKey) {
      console.log('\n🔐 Found Gemini API Key in environment or settings.');
      geminiApiKey = await prompt('Gemini API Key', geminiApiKey, 'Enter to use? Or paste a new one', true);
    } else {
      console.log('\n📖 In API Key mode, the remote worker needs your Gemini API Key to authenticate.');
      geminiApiKey = await prompt('Gemini API Key', '', 'Paste your Gemini API Key', true);
    }
  } else {
    console.log(` - Using current auth strategy: ${authStrategy}`);
  }

  // 5. Save Confirmed State
  // One worker VM per user. NOTE(review): 'mattkorwel' looks like a leftover
  // personal default for when $USER is unset — confirm before shipping.
  const targetVM = `gcli-workspace-${env.USER || 'mattkorwel'}`;
  if (!fs.existsSync(path.dirname(settingsPath))) fs.mkdirSync(path.dirname(settingsPath), { recursive: true });

  settings = {
  workspace: {
  projectId, zone, terminalTarget,
  userFork, upstreamRepo,
  remoteHost: 'gcli-worker',
  remoteWorkDir: '~/dev/main',
  useContainer: true
  }
  };
  // Persist config before doing any remote work so a later failure can resume.
  fs.writeFileSync(settingsPath, JSON.stringify(settings, null, 2));
  console.log(`\n✅ Configuration saved to ${settingsPath}`);

  // Transition to Execution
  const provider = ProviderFactory.getProvider({ projectId, zone, instanceName: targetVM });

  console.log('\n🏗️ PHASE 2: INFRASTRUCTURE');
  console.log('--------------------------------------------------------------------------------');
  console.log(` - Verifying access and finding worker ${targetVM}...`);
  let status = await provider.getStatus();

  // UNKNOWN/ERROR is treated as "VM does not exist yet" → offer to provision.
  if (status.status === 'UNKNOWN' || status.status === 'ERROR') {
    const shouldProvision = await confirm(`Worker ${targetVM} not found. Provision it now?`);
    if (!shouldProvision) return 1;

    const provisionRes = await provider.provision();
    if (provisionRes !== 0) return 1;
    status = await provider.getStatus();
  }

  if (status.status !== 'RUNNING') {
    console.log(' - Waking up worker...');
    await provider.ensureReady();
  }

  console.log('\n🚀 PHASE 3: REMOTE INITIALIZATION');
  console.log('--------------------------------------------------------------------------------');
  const setupRes = await provider.setup({ projectId, zone, dnsSuffix: '.internal.gcpnode.com' });
  if (setupRes !== 0) return setupRes;

  // Use the unified path to ensure host and container match perfectly
  const workspaceRoot = `/home/node/.workspaces`;

  const persistentScripts = `${workspaceRoot}/scripts`;
  const remoteConfigDir = `${workspaceRoot}/gemini-cli-config/.gemini`;

  console.log(`\n📦 Synchronizing Logic & Credentials...`);
  // Ensure the directory structure exists on the host
  await provider.exec(`sudo mkdir -p ${workspaceRoot}/main ${workspaceRoot}/worktrees ${workspaceRoot}/policies ${workspaceRoot}/scripts ${remoteConfigDir}`);
  // uid/gid 1000 matches the container's `node` user.
  await provider.exec(`sudo chown -R 1000:1000 ${workspaceRoot}`);
  // NOTE(review): world-writable 777 is a blunt instrument — confirm it is
  // required for host/container interop rather than 775.
  await provider.exec(`sudo chmod -R 777 ${workspaceRoot}`);

  // 1. Sync Scripts & Policies
  await provider.sync('extensions/workspaces/scripts/', `${persistentScripts}/`, { delete: true, sudo: true });
  await provider.sync('extensions/workspaces/policies/workspace-policy.toml', `${workspaceRoot}/policies/workspace-policy.toml`, { sudo: true });

  // 2. Initialize Remote Gemini Config with Auth
  console.log('⚙️ Initializing remote Gemini configuration...');

  // NEW: Sync local theme and UI preferences
  let localTheme = 'Shades Of Purple';
  let useAlternateBuffer = true;
  let useBackgroundColor = true;

  if (fs.existsSync(localSettingsPath)) {
    try {
      const localSettings = JSON.parse(fs.readFileSync(localSettingsPath, 'utf8'));
      localTheme = localSettings.ui?.theme || localTheme;
      useAlternateBuffer = localSettings.ui?.useAlternateBuffer ?? useAlternateBuffer;
      useBackgroundColor = localSettings.ui?.useBackgroundColor ?? useBackgroundColor;
    } catch (e) {} // Best-effort: unreadable local settings keep the defaults.
  }

  // Settings written to the remote worker's ~/.gemini/settings.json:
  // auth strategy, disabled folder trust, mirrored UI prefs, no auto-update.
  const remoteSettings: any = {
  security: {
  auth: {
  selectedType: authStrategy
  },
  folderTrust: {
  enabled: false
  }
  },
  ui: {
  theme: localTheme,
  useAlternateBuffer,
  useBackgroundColor,
  },
  general: {
  enableAutoUpdate: false
  }
  };

  if (authStrategy === 'gemini-api-key' && geminiApiKey) {
    remoteSettings.security.auth.apiKey = geminiApiKey;
    console.log(' ✅ Configuring remote for API Key authentication.');
  }

  // Write settings to a local temp file, sync it over, then clean up.
  const tmpSettingsPath = path.join(os.tmpdir(), `remote-settings-${Date.now()}.json`);
  fs.writeFileSync(tmpSettingsPath, JSON.stringify(remoteSettings, null, 2));

  // Ensure the remote config dir exists before syncing
  await provider.exec(`sudo mkdir -p ${remoteConfigDir} && sudo chmod 777 ${remoteConfigDir}`);
  await provider.sync(tmpSettingsPath, `${remoteConfigDir}/settings.json`, { sudo: true });
  fs.unlinkSync(tmpSettingsPath);

  // 3. Sync credentials for Google Accounts if needed
  if (authStrategy === 'google_accounts' || authStrategy === 'oauth-personal') {
    if (fs.existsSync(path.join(env.HOME || '', '.gemini/google_accounts.json'))) {
      await provider.sync(path.join(env.HOME || '', '.gemini/google_accounts.json'), `${remoteConfigDir}/google_accounts.json`, { sudo: true });
      console.log(' ✅ Synchronized Google Accounts credentials.');
    }
  }

  if (githubToken) {
    // NOTE(review): the token is interpolated into a shell command line, which
    // can leak via remote process listings/shell history — consider piping via
    // stdin instead. Flagged, not changed here.
    await provider.exec(`echo ${githubToken} | sudo tee ${workspaceRoot}/.gh_token > /dev/null && sudo chmod 600 ${workspaceRoot}/.gh_token`);
    // Authenticate GH CLI on host
    await provider.exec(`sudo -u $(whoami) gh auth login --with-token < ${workspaceRoot}/.gh_token`);
    console.log(' ✅ Authenticated GitHub CLI on host.');
  }

  // Final Repo Sync
  // Blobless clone of the fork with upstream added as a second remote.
  console.log(`🚀 Finalizing Remote Repository (${userFork})...`);
  const repoUrl = `https://github.com/${userFork}.git`;
  const cloneCmd = `sudo rm -rf ${workspaceRoot}/main && sudo git clone --quiet --filter=blob:none ${repoUrl} ${workspaceRoot}/main && sudo git -C ${workspaceRoot}/main remote add upstream https://github.com/${upstreamRepo}.git && sudo git -C ${workspaceRoot}/main fetch --quiet upstream && sudo chown -R 1000:1000 ${workspaceRoot}`;
  await provider.exec(cloneCmd);

  console.log('\n✨ ALL SYSTEMS GO! Your Gemini Workspace is ready.');
  return 0;
}
|
||||
|
||||
// Self-execution guard: run setup only when this module is invoked directly,
// not when imported. NOTE(review): comparing against `file://${process.argv[1]}`
// assumes a POSIX path with no percent-encoded characters — `pathToFileURL`
// would be more robust; confirm behavior on Windows.
if (import.meta.url === `file://${process.argv[1]}`) {
  runSetup().catch(console.error);
}
|
||||
@@ -0,0 +1,61 @@
|
||||
/**
|
||||
* Workspace Status Inspector (Local)
|
||||
*
|
||||
* Orchestrates remote status retrieval via the WorkerProvider.
|
||||
*/
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { ProviderFactory } from './providers/ProviderFactory.ts';
|
||||
|
||||
// ES modules have no __dirname; derive it from this module's URL.
const __dirname = path.dirname(fileURLToPath(import.meta.url));
// Repository root — assumes this file sits four directories below it
// (e.g. extensions/workspaces/scripts/…). TODO(review): confirm the depth.
const REPO_ROOT = path.resolve(__dirname, '../../../..');
|
||||
|
||||
async function runStatus(env: NodeJS.ProcessEnv = process.env) {
|
||||
const settingsPath = path.join(REPO_ROOT, '.gemini/workspaces/settings.json');
|
||||
if (!fs.existsSync(settingsPath)) {
|
||||
console.error('❌ Settings not found. Run "npm run workspace:setup" first.');
|
||||
return 1;
|
||||
}
|
||||
const settings = JSON.parse(fs.readFileSync(settingsPath, 'utf8'));
|
||||
const config = settings.workspace;
|
||||
if (!config) {
|
||||
console.error('❌ Deep Review configuration not found.');
|
||||
return 1;
|
||||
}
|
||||
|
||||
const { projectId, zone } = config;
|
||||
const targetVM = `gcli-workspace-${env.USER || 'mattkorwel'}`;
|
||||
const provider = ProviderFactory.getProvider({ projectId, zone, instanceName: targetVM });
|
||||
|
||||
console.log(`\n🛰️ Workspace Mission Control: ${targetVM}`);
|
||||
console.log(`--------------------------------------------------------------------------------`);
|
||||
|
||||
const status = await provider.getStatus();
|
||||
console.log(` - VM State: ${status.status}`);
|
||||
console.log(` - Internal IP: ${status.internalIp || 'N/A'}`);
|
||||
|
||||
if (status.status === 'RUNNING') {
|
||||
console.log(`\n🧵 Active Sessions (tmux):`);
|
||||
// We fetch the list of sessions from INSIDE the container
|
||||
const tmuxRes = await provider.getExecOutput('tmux list-sessions -F "#S" 2>/dev/null', { wrapContainer: 'maintainer-worker' });
|
||||
|
||||
if (tmuxRes.status === 0 && tmuxRes.stdout.trim()) {
|
||||
const sessions = tmuxRes.stdout.trim().split('\n');
|
||||
sessions.forEach(s => {
|
||||
if (s.startsWith('workspace-')) {
|
||||
console.log(` ✅ ${s}`);
|
||||
} else {
|
||||
console.log(` 🔹 ${s} (Non-workspace)`);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
console.log(' - No active sessions');
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`--------------------------------------------------------------------------------\n`);
|
||||
return 0;
|
||||
}
|
||||
|
||||
// This script is a pure CLI entry point: it always runs on load (no
// import-guard like the setup/worker scripts have). Errors are logged rather
// than crashing the process.
runStatus().catch(console.error);
|
||||
@@ -0,0 +1,74 @@
|
||||
/**
|
||||
* Universal Workspace Worker (Remote)
|
||||
*
|
||||
* Stateful orchestrator for complex development loops.
|
||||
*/
|
||||
import { spawnSync } from 'child_process';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import { runReviewPlaybook } from './playbooks/review.ts';
|
||||
import { runFixPlaybook } from './playbooks/fix.ts';
|
||||
import { runReadyPlaybook } from './playbooks/ready.ts';
|
||||
|
||||
export async function runWorker(args: string[]) {
|
||||
const prNumberOrIssue = args[0];
|
||||
const branchName = args[1];
|
||||
const policyPath = args[2];
|
||||
const action = args[3] || 'review';
|
||||
|
||||
if (!prNumberOrIssue || !policyPath) {
|
||||
console.error('Usage: tsx worker.ts <ID> <BRANCH_NAME> <POLICY_PATH> [action]');
|
||||
return 1;
|
||||
}
|
||||
|
||||
const workDir = process.cwd();
|
||||
|
||||
// For 'implement', the ID is an issue number and we might not have a branch yet
|
||||
const isImplement = action === 'implement';
|
||||
const targetDir = isImplement ? workDir : path.join(workDir, branchName);
|
||||
|
||||
// 1. Provision Environment
|
||||
if (!isImplement && !fs.existsSync(targetDir)) {
|
||||
console.log(`🌿 Provisioning PR #${prNumberOrIssue} into ${branchName}...`);
|
||||
const cloneCmd = `git clone --filter=blob:none https://github.com/google-gemini/gemini-cli.git ${targetDir}`;
|
||||
spawnSync(cloneCmd, { stdio: 'inherit', shell: true });
|
||||
|
||||
process.chdir(targetDir);
|
||||
spawnSync('gh', ['pr', 'checkout', prNumberOrIssue], { stdio: 'inherit' });
|
||||
} else if (!isImplement) {
|
||||
process.chdir(targetDir);
|
||||
}
|
||||
|
||||
// Use global gemini command pre-installed in the maintainer image
|
||||
const geminiBin = 'gemini';
|
||||
|
||||
// 2. Dispatch to Playbook
|
||||
switch (action) {
|
||||
case 'review':
|
||||
return runReviewPlaybook(prNumberOrIssue, targetDir, policyPath, geminiBin);
|
||||
|
||||
case 'fix':
|
||||
// The 'fix' playbook now handles its own internal loop
|
||||
return runFixPlaybook(prNumberOrIssue, targetDir, policyPath, geminiBin);
|
||||
|
||||
case 'ready':
|
||||
return runReadyPlaybook(prNumberOrIssue, targetDir, policyPath, geminiBin);
|
||||
|
||||
case 'implement':
|
||||
// Lazy-load implement playbook (to be created)
|
||||
const { runImplementPlaybook } = await import('./playbooks/implement.ts');
|
||||
return runImplementPlaybook(prNumberOrIssue, workDir, policyPath, geminiBin);
|
||||
|
||||
case 'open':
|
||||
console.log(`🚀 Dropping into manual session...`);
|
||||
return 0;
|
||||
|
||||
default:
|
||||
console.error(`❌ Unknown action: ${action}`);
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
// Self-execution guard: run the worker only when this module is invoked
// directly, not when imported. NOTE(review): the `file://${process.argv[1]}`
// comparison assumes a POSIX path with no percent-encoded characters —
// `pathToFileURL` would be more robust; confirm on Windows.
if (import.meta.url === `file://${process.argv[1]}`) {
  runWorker(process.argv.slice(2)).catch(console.error);
}
|
||||
Reference in New Issue
Block a user