mirror of
https://github.com/google-gemini/gemini-cli.git
synced 2026-05-15 06:12:50 -07:00
Merge branch 'main' into dupe-listing
This commit is contained in:
@@ -0,0 +1,45 @@
|
|||||||
|
name: '🧠 Gemini CLI Bot: Brain'
|
||||||
|
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: '0 0 * * *' # Every 24 hours
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: '${{ github.workflow }}-${{ github.ref }}'
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: 'write'
|
||||||
|
issues: 'write'
|
||||||
|
pull-requests: 'write'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
brain:
|
||||||
|
name: 'Brain (Reasoning Layer)'
|
||||||
|
runs-on: 'ubuntu-latest'
|
||||||
|
if: "github.repository == 'google-gemini/gemini-cli'"
|
||||||
|
steps:
|
||||||
|
- name: 'Checkout'
|
||||||
|
uses: 'actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8' # ratchet:actions/checkout@v5
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: 'Setup Node.js'
|
||||||
|
uses: 'actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020' # ratchet:actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: '20'
|
||||||
|
cache: 'npm'
|
||||||
|
|
||||||
|
- name: 'Install dependencies'
|
||||||
|
run: 'npm ci'
|
||||||
|
|
||||||
|
- name: 'Build Gemini CLI'
|
||||||
|
run: 'npm run bundle'
|
||||||
|
|
||||||
|
- name: 'Download Previous Metrics'
|
||||||
|
uses: 'actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093' # ratchet:actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: 'metrics-before'
|
||||||
|
path: 'tools/gemini-cli-bot/history/'
|
||||||
|
continue-on-error: true
|
||||||
@@ -0,0 +1,59 @@
|
|||||||
|
name: '🔄 Gemini CLI Bot: Pulse'
|
||||||
|
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: '*/30 * * * *' # Every 30 minutes
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: '${{ github.workflow }}-${{ github.ref }}'
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: 'write'
|
||||||
|
issues: 'write'
|
||||||
|
pull-requests: 'write'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
pulse:
|
||||||
|
name: 'Pulse (Reflex Layer)'
|
||||||
|
runs-on: 'ubuntu-latest'
|
||||||
|
if: "github.repository == 'google-gemini/gemini-cli'"
|
||||||
|
steps:
|
||||||
|
- name: 'Checkout'
|
||||||
|
uses: 'actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8' # ratchet:actions/checkout@v5
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: 'Setup Node.js'
|
||||||
|
uses: 'actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020' # ratchet:actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: '20'
|
||||||
|
cache: 'npm'
|
||||||
|
|
||||||
|
- name: 'Install dependencies'
|
||||||
|
run: 'npm ci'
|
||||||
|
|
||||||
|
- name: 'Collect Metrics'
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
|
||||||
|
run: 'npm run metrics'
|
||||||
|
|
||||||
|
- name: 'Archive Metrics'
|
||||||
|
uses: 'actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02' # ratchet:actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: 'metrics-before'
|
||||||
|
path: 'metrics-before.csv'
|
||||||
|
|
||||||
|
- name: 'Run Reflex Processes'
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: '${{ secrets.GITHUB_TOKEN }}'
|
||||||
|
run: |
|
||||||
|
if [ -d "tools/gemini-cli-bot/processes/scripts" ] && [ "$(ls -A tools/gemini-cli-bot/processes/scripts)" ]; then
|
||||||
|
for script in tools/gemini-cli-bot/processes/scripts/*.ts; do
|
||||||
|
echo "Running reflex script: $script"
|
||||||
|
npx tsx "$script"
|
||||||
|
done
|
||||||
|
else
|
||||||
|
echo "No reflex scripts found."
|
||||||
|
fi
|
||||||
@@ -63,6 +63,7 @@
|
|||||||
"lint:all": "node scripts/lint.js",
|
"lint:all": "node scripts/lint.js",
|
||||||
"format": "prettier --experimental-cli --write .",
|
"format": "prettier --experimental-cli --write .",
|
||||||
"typecheck": "npm run typecheck --workspaces --if-present && tsc -b evals/tsconfig.json integration-tests/tsconfig.json memory-tests/tsconfig.json",
|
"typecheck": "npm run typecheck --workspaces --if-present && tsc -b evals/tsconfig.json integration-tests/tsconfig.json memory-tests/tsconfig.json",
|
||||||
|
"metrics": "tsx tools/gemini-cli-bot/metrics/index.ts",
|
||||||
"preflight": "npm run clean && npm ci && npm run format && npm run build && npm run lint:ci && npm run typecheck && npm run test:ci",
|
"preflight": "npm run clean && npm ci && npm run format && npm run build && npm run lint:ci && npm run typecheck && npm run test:ci",
|
||||||
"prepare": "husky && npm run bundle",
|
"prepare": "husky && npm run bundle",
|
||||||
"prepare:package": "node scripts/prepare-package.js",
|
"prepare:package": "node scripts/prepare-package.js",
|
||||||
|
|||||||
@@ -358,8 +358,8 @@ export async function main() {
|
|||||||
|
|
||||||
const isDebugMode = cliConfig.isDebugMode(argv);
|
const isDebugMode = cliConfig.isDebugMode(argv);
|
||||||
const consolePatcher = new ConsolePatcher({
|
const consolePatcher = new ConsolePatcher({
|
||||||
stderr: true,
|
stderr: argv.isCommand ? false : true,
|
||||||
interactive: isHeadlessMode() ? false : true,
|
interactive: isHeadlessMode() && !argv.isCommand ? false : true,
|
||||||
debugMode: isDebugMode,
|
debugMode: isDebugMode,
|
||||||
onNewMessage: (msg) => {
|
onNewMessage: (msg) => {
|
||||||
coreEvents.emitConsoleLog(msg.type, msg.content);
|
coreEvents.emitConsoleLog(msg.type, msg.content);
|
||||||
@@ -786,20 +786,16 @@ export function initializeOutputListenersAndFlush() {
|
|||||||
if (coreEvents.listenerCount(CoreEvent.ConsoleLog) === 0) {
|
if (coreEvents.listenerCount(CoreEvent.ConsoleLog) === 0) {
|
||||||
coreEvents.on(CoreEvent.ConsoleLog, (payload: ConsoleLogPayload) => {
|
coreEvents.on(CoreEvent.ConsoleLog, (payload: ConsoleLogPayload) => {
|
||||||
if (payload.type === 'error' || payload.type === 'warn') {
|
if (payload.type === 'error' || payload.type === 'warn') {
|
||||||
writeToStderr(payload.content);
|
writeToStderr(payload.content + '\n');
|
||||||
} else {
|
} else {
|
||||||
writeToStdout(payload.content);
|
writeToStderr(payload.content + '\n');
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
if (coreEvents.listenerCount(CoreEvent.UserFeedback) === 0) {
|
if (coreEvents.listenerCount(CoreEvent.UserFeedback) === 0) {
|
||||||
coreEvents.on(CoreEvent.UserFeedback, (payload: UserFeedbackPayload) => {
|
coreEvents.on(CoreEvent.UserFeedback, (payload: UserFeedbackPayload) => {
|
||||||
if (payload.severity === 'error' || payload.severity === 'warning') {
|
writeToStderr(payload.message + '\n');
|
||||||
writeToStderr(payload.message);
|
|
||||||
} else {
|
|
||||||
writeToStdout(payload.message);
|
|
||||||
}
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -0,0 +1,51 @@
|
|||||||
|
# Gemini CLI Bot (Cognitive Repository)
|
||||||
|
|
||||||
|
This directory contains the foundational architecture for the `gemini-cli-bot`,
|
||||||
|
transforming the repository into a proactive, evolutionary system.
|
||||||
|
|
||||||
|
It implements a dual-layer approach to balance immediate responsiveness with
|
||||||
|
long-term strategic optimization.
|
||||||
|
|
||||||
|
## Layered Execution Model
|
||||||
|
|
||||||
|
### 1. System 1: The Pulse (Reflex Layer)
|
||||||
|
|
||||||
|
- **Purpose**: High-frequency, deterministic maintenance and data collection.
|
||||||
|
- **Frequency**: 30-minute cron (`.github/workflows/gemini-cli-bot-pulse.yml`).
|
||||||
|
- **Implementation**: Pure TypeScript/JavaScript scripts.
|
||||||
|
- **Role**: Currently focuses on gathering repository metrics
|
||||||
|
(`tools/gemini-cli-bot/metrics/scripts`).
|
||||||
|
- **Output**: Action execution and `metrics-before.csv` artifact generation.
|
||||||
|
|
||||||
|
### 2. System 2: The Brain (Reasoning Layer)
|
||||||
|
|
||||||
|
- **Purpose**: Strategic investigation, policy refinement, and
|
||||||
|
self-optimization.
|
||||||
|
- **Frequency**: 24-hour cron (`.github/workflows/gemini-cli-bot-brain.yml`).
|
||||||
|
- **Implementation**: Agentic Gemini CLI phases.
|
||||||
|
- **Role**: Analyzing metric trends and running deeper repository health
|
||||||
|
investigations.
|
||||||
|
|
||||||
|
## Directory Structure
|
||||||
|
|
||||||
|
- `metrics/`: Contains the deterministic runner (`index.ts`) and individual
|
||||||
|
TypeScript scripts (`scripts/`) that use the GitHub CLI to track metrics like
|
||||||
|
open issues, PR latency, throughput, and reviewer domain expertise.
|
||||||
|
- `processes/scripts/`: Placeholder directory for future deterministic triage
|
||||||
|
and routing scripts.
|
||||||
|
- `investigations/`: Placeholder directory for agentic root-cause analysis
|
||||||
|
phases.
|
||||||
|
- `critique/`: Placeholder directory for policy evaluation.
|
||||||
|
- `history/`: Storage for downloaded metrics artifacts from previous runs.
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
To manually collect repository metrics locally, run the following command from
|
||||||
|
the workspace root:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
npm run metrics
|
||||||
|
```
|
||||||
|
|
||||||
|
This will execute all scripts within `metrics/scripts/` and output the results
|
||||||
|
to a `metrics-before.csv` file in the root directory.
|
||||||
@@ -0,0 +1,69 @@
|
|||||||
|
/**
|
||||||
|
* @license
|
||||||
|
* Copyright 2026 Google LLC
|
||||||
|
* SPDX-License-Identifier: Apache-2.0
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { readdirSync, writeFileSync } from 'node:fs';
|
||||||
|
import { join } from 'node:path';
|
||||||
|
import { execSync } from 'node:child_process';
|
||||||
|
|
||||||
|
const SCRIPTS_DIR = join(
|
||||||
|
process.cwd(),
|
||||||
|
'tools',
|
||||||
|
'gemini-cli-bot',
|
||||||
|
'metrics',
|
||||||
|
'scripts',
|
||||||
|
);
|
||||||
|
const OUTPUT_FILE = join(process.cwd(), 'metrics-before.csv');
|
||||||
|
|
||||||
|
function processOutputLine(line: string, results: string[]) {
|
||||||
|
const trimmedLine = line.trim();
|
||||||
|
if (!trimmedLine) return;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const parsed = JSON.parse(trimmedLine);
|
||||||
|
if (
|
||||||
|
parsed &&
|
||||||
|
typeof parsed === 'object' &&
|
||||||
|
'metric' in parsed &&
|
||||||
|
'value' in parsed
|
||||||
|
) {
|
||||||
|
results.push(`${parsed.metric},${parsed.value}`);
|
||||||
|
} else {
|
||||||
|
results.push(trimmedLine);
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
results.push(trimmedLine);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function run() {
|
||||||
|
const scripts = readdirSync(SCRIPTS_DIR).filter(
|
||||||
|
(file) => file.endsWith('.ts') || file.endsWith('.js'),
|
||||||
|
);
|
||||||
|
|
||||||
|
const results: string[] = ['metric,value'];
|
||||||
|
|
||||||
|
for (const script of scripts) {
|
||||||
|
console.log(`Running metric script: ${script}`);
|
||||||
|
try {
|
||||||
|
const scriptPath = join(SCRIPTS_DIR, script);
|
||||||
|
const output = execSync(`npx tsx ${JSON.stringify(scriptPath)}`, {
|
||||||
|
encoding: 'utf-8',
|
||||||
|
});
|
||||||
|
|
||||||
|
const lines = output.trim().split('\n');
|
||||||
|
for (const line of lines) {
|
||||||
|
processOutputLine(line, results);
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`Error running ${script}:`, error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
writeFileSync(OUTPUT_FILE, results.join('\n'));
|
||||||
|
console.log(`Saved metrics to ${OUTPUT_FILE}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
run().catch(console.error);
|
||||||
@@ -0,0 +1,157 @@
|
|||||||
|
/**
|
||||||
|
* @license
|
||||||
|
* Copyright 2026 Google LLC
|
||||||
|
* SPDX-License-Identifier: Apache-2.0
|
||||||
|
*
|
||||||
|
* @license
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { GITHUB_OWNER, GITHUB_REPO, MetricOutput } from '../types.js';
|
||||||
|
import { execSync } from 'node:child_process';
|
||||||
|
import path from 'node:path';
|
||||||
|
import { fileURLToPath } from 'node:url';
|
||||||
|
|
||||||
|
const __filename = fileURLToPath(import.meta.url);
|
||||||
|
const __dirname = path.dirname(__filename);
|
||||||
|
const repoRoot = path.resolve(__dirname, '../../../../');
|
||||||
|
|
||||||
|
try {
|
||||||
|
// 1. Fetch recent PR numbers and reviews from GitHub (so we have reviewer names/logins)
|
||||||
|
const query = `
|
||||||
|
query($owner: String!, $repo: String!) {
|
||||||
|
repository(owner: $owner, name: $repo) {
|
||||||
|
pullRequests(last: 100, states: MERGED) {
|
||||||
|
nodes {
|
||||||
|
number
|
||||||
|
reviews(first: 20) {
|
||||||
|
nodes {
|
||||||
|
authorAssociation
|
||||||
|
author { login, ... on User { name } }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
`;
|
||||||
|
const output = execSync(
|
||||||
|
`gh api graphql -F owner=${GITHUB_OWNER} -F repo=${GITHUB_REPO} -f query='${query}'`,
|
||||||
|
{ encoding: 'utf-8', stdio: ['ignore', 'pipe', 'ignore'] },
|
||||||
|
);
|
||||||
|
const data = JSON.parse(output).data.repository;
|
||||||
|
|
||||||
|
// 2. Map PR numbers to local commits using git log
|
||||||
|
const logOutput = execSync('git log -n 5000 --format="%H|%s"', {
|
||||||
|
cwd: repoRoot,
|
||||||
|
encoding: 'utf-8',
|
||||||
|
stdio: ['ignore', 'pipe', 'ignore'],
|
||||||
|
});
|
||||||
|
const prCommits = new Map<number, string>();
|
||||||
|
for (const line of logOutput.split('\n')) {
|
||||||
|
if (!line) continue;
|
||||||
|
const [hash, subject] = line.split('|');
|
||||||
|
const match = subject.match(/\(#(\d+)\)$/);
|
||||||
|
if (match) {
|
||||||
|
prCommits.set(parseInt(match[1], 10), hash);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let totalMaintainerReviews = 0;
|
||||||
|
let maintainerReviewsWithExpertise = 0;
|
||||||
|
|
||||||
|
for (const pr of data.pullRequests.nodes) {
|
||||||
|
if (!pr.reviews?.nodes || pr.reviews.nodes.length === 0) continue;
|
||||||
|
|
||||||
|
const commitHash = prCommits.get(pr.number);
|
||||||
|
if (!commitHash) continue; // Skip if we don't have the commit locally
|
||||||
|
|
||||||
|
// 3. Get exact files changed using local git diff-tree, bypassing GraphQL limits
|
||||||
|
const diffTreeOutput = execSync(
|
||||||
|
`git diff-tree --no-commit-id --name-only -r ${commitHash}`,
|
||||||
|
{ cwd: repoRoot, encoding: 'utf-8', stdio: ['ignore', 'pipe', 'ignore'] },
|
||||||
|
);
|
||||||
|
const files = diffTreeOutput.split('\n').filter(Boolean);
|
||||||
|
if (files.length === 0) continue;
|
||||||
|
|
||||||
|
// Cache git log authors per path to avoid redundant child_process calls
|
||||||
|
const authorCache = new Map<string, string>();
|
||||||
|
const getAuthors = (targetPath: string) => {
|
||||||
|
if (authorCache.has(targetPath)) return authorCache.get(targetPath)!;
|
||||||
|
try {
|
||||||
|
const authors = execSync(
|
||||||
|
`git log --format="%an|%ae" -- ${JSON.stringify(targetPath)}`,
|
||||||
|
{
|
||||||
|
cwd: repoRoot,
|
||||||
|
encoding: 'utf-8',
|
||||||
|
stdio: ['ignore', 'pipe', 'ignore'],
|
||||||
|
},
|
||||||
|
).toLowerCase();
|
||||||
|
authorCache.set(targetPath, authors);
|
||||||
|
return authors;
|
||||||
|
} catch {
|
||||||
|
authorCache.set(targetPath, '');
|
||||||
|
return '';
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
const reviewersOnPR = new Map<string, { name?: string }>();
|
||||||
|
for (const review of pr.reviews.nodes) {
|
||||||
|
if (
|
||||||
|
['MEMBER', 'OWNER'].includes(review.authorAssociation) &&
|
||||||
|
review.author?.login
|
||||||
|
) {
|
||||||
|
const login = review.author.login.toLowerCase();
|
||||||
|
if (login.endsWith('[bot]') || login.includes('bot')) continue;
|
||||||
|
reviewersOnPR.set(login, review.author);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const [login, authorInfo] of reviewersOnPR.entries()) {
|
||||||
|
totalMaintainerReviews++;
|
||||||
|
let hasExpertise = false;
|
||||||
|
const name = authorInfo.name ? authorInfo.name.toLowerCase() : '';
|
||||||
|
|
||||||
|
for (const file of files) {
|
||||||
|
// Precise check: immediate file
|
||||||
|
let authorsStr = getAuthors(file);
|
||||||
|
if (authorsStr.includes(login) || (name && authorsStr.includes(name))) {
|
||||||
|
hasExpertise = true;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fallback: file's directory
|
||||||
|
const dir = path.dirname(file);
|
||||||
|
authorsStr = getAuthors(dir);
|
||||||
|
if (authorsStr.includes(login) || (name && authorsStr.includes(name))) {
|
||||||
|
hasExpertise = true;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (hasExpertise) {
|
||||||
|
maintainerReviewsWithExpertise++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const ratio =
|
||||||
|
totalMaintainerReviews > 0
|
||||||
|
? maintainerReviewsWithExpertise / totalMaintainerReviews
|
||||||
|
: 0;
|
||||||
|
const timestamp = new Date().toISOString();
|
||||||
|
|
||||||
|
process.stdout.write(
|
||||||
|
JSON.stringify(<MetricOutput>{
|
||||||
|
metric: 'domain_expertise',
|
||||||
|
value: Math.round(ratio * 100) / 100,
|
||||||
|
timestamp,
|
||||||
|
details: {
|
||||||
|
totalMaintainerReviews,
|
||||||
|
maintainerReviewsWithExpertise,
|
||||||
|
},
|
||||||
|
}) + '\n',
|
||||||
|
);
|
||||||
|
} catch (err) {
|
||||||
|
process.stderr.write(err instanceof Error ? err.message : String(err));
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
@@ -0,0 +1,138 @@
|
|||||||
|
/**
|
||||||
|
* @license
|
||||||
|
* Copyright 2026 Google LLC
|
||||||
|
* SPDX-License-Identifier: Apache-2.0
|
||||||
|
*
|
||||||
|
* @license
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { GITHUB_OWNER, GITHUB_REPO, MetricOutput } from '../types.js';
|
||||||
|
import { execSync } from 'node:child_process';
|
||||||
|
|
||||||
|
try {
|
||||||
|
const query = `
|
||||||
|
query($owner: String!, $repo: String!) {
|
||||||
|
repository(owner: $owner, name: $repo) {
|
||||||
|
pullRequests(last: 100, states: MERGED) {
|
||||||
|
nodes {
|
||||||
|
authorAssociation
|
||||||
|
createdAt
|
||||||
|
mergedAt
|
||||||
|
}
|
||||||
|
}
|
||||||
|
issues(last: 100, states: CLOSED) {
|
||||||
|
nodes {
|
||||||
|
authorAssociation
|
||||||
|
createdAt
|
||||||
|
closedAt
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
`;
|
||||||
|
const output = execSync(
|
||||||
|
`gh api graphql -F owner=${GITHUB_OWNER} -F repo=${GITHUB_REPO} -f query='${query}'`,
|
||||||
|
{ encoding: 'utf-8' },
|
||||||
|
);
|
||||||
|
const data = JSON.parse(output).data.repository;
|
||||||
|
|
||||||
|
const prs = data.pullRequests.nodes.map(
|
||||||
|
(p: {
|
||||||
|
authorAssociation: string;
|
||||||
|
mergedAt: string;
|
||||||
|
createdAt: string;
|
||||||
|
}) => ({
|
||||||
|
association: p.authorAssociation,
|
||||||
|
latencyHours:
|
||||||
|
(new Date(p.mergedAt).getTime() - new Date(p.createdAt).getTime()) /
|
||||||
|
(1000 * 60 * 60),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
const issues = data.issues.nodes.map(
|
||||||
|
(i: {
|
||||||
|
authorAssociation: string;
|
||||||
|
closedAt: string;
|
||||||
|
createdAt: string;
|
||||||
|
}) => ({
|
||||||
|
association: i.authorAssociation,
|
||||||
|
latencyHours:
|
||||||
|
(new Date(i.closedAt).getTime() - new Date(i.createdAt).getTime()) /
|
||||||
|
(1000 * 60 * 60),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const isMaintainer = (assoc: string) =>
|
||||||
|
['MEMBER', 'OWNER', 'COLLABORATOR'].includes(assoc);
|
||||||
|
const calculateAvg = (
|
||||||
|
items: { association: string; latencyHours: number }[],
|
||||||
|
) =>
|
||||||
|
items.length
|
||||||
|
? items.reduce((a, b) => a + b.latencyHours, 0) / items.length
|
||||||
|
: 0;
|
||||||
|
|
||||||
|
const prMaintainers = calculateAvg(
|
||||||
|
prs.filter((i: { association: string; latencyHours: number }) =>
|
||||||
|
isMaintainer(i.association),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
const prCommunity = calculateAvg(
|
||||||
|
prs.filter(
|
||||||
|
(i: { association: string; latencyHours: number }) =>
|
||||||
|
!isMaintainer(i.association),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
const prOverall = calculateAvg(prs);
|
||||||
|
|
||||||
|
const issueMaintainers = calculateAvg(
|
||||||
|
issues.filter((i: { association: string; latencyHours: number }) =>
|
||||||
|
isMaintainer(i.association),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
const issueCommunity = calculateAvg(
|
||||||
|
issues.filter(
|
||||||
|
(i: { association: string; latencyHours: number }) =>
|
||||||
|
!isMaintainer(i.association),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
const issueOverall = calculateAvg(issues);
|
||||||
|
|
||||||
|
const timestamp = new Date().toISOString();
|
||||||
|
|
||||||
|
const metrics: MetricOutput[] = [
|
||||||
|
{
|
||||||
|
metric: 'latency_pr_overall_hours',
|
||||||
|
value: Math.round(prOverall * 100) / 100,
|
||||||
|
timestamp,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
metric: 'latency_pr_maintainers_hours',
|
||||||
|
value: Math.round(prMaintainers * 100) / 100,
|
||||||
|
timestamp,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
metric: 'latency_pr_community_hours',
|
||||||
|
value: Math.round(prCommunity * 100) / 100,
|
||||||
|
timestamp,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
metric: 'latency_issue_overall_hours',
|
||||||
|
value: Math.round(issueOverall * 100) / 100,
|
||||||
|
timestamp,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
metric: 'latency_issue_maintainers_hours',
|
||||||
|
value: Math.round(issueMaintainers * 100) / 100,
|
||||||
|
timestamp,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
metric: 'latency_issue_community_hours',
|
||||||
|
value: Math.round(issueCommunity * 100) / 100,
|
||||||
|
timestamp,
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
metrics.forEach((m) => process.stdout.write(JSON.stringify(m) + '\n'));
|
||||||
|
} catch (err) {
|
||||||
|
process.stderr.write(err instanceof Error ? err.message : String(err));
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
@@ -0,0 +1,20 @@
|
|||||||
|
/**
|
||||||
|
* @license
|
||||||
|
* Copyright 2026 Google LLC
|
||||||
|
* SPDX-License-Identifier: Apache-2.0
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { execSync } from 'node:child_process';
|
||||||
|
|
||||||
|
try {
|
||||||
|
const count = execSync(
|
||||||
|
'gh issue list --state open --limit 1000 --json number --jq length',
|
||||||
|
{
|
||||||
|
encoding: 'utf-8',
|
||||||
|
},
|
||||||
|
).trim();
|
||||||
|
console.log(`open_issues,${count}`);
|
||||||
|
} catch {
|
||||||
|
// Fallback if gh fails or no issues found
|
||||||
|
console.log('open_issues,0');
|
||||||
|
}
|
||||||
@@ -0,0 +1,20 @@
|
|||||||
|
/**
|
||||||
|
* @license
|
||||||
|
* Copyright 2026 Google LLC
|
||||||
|
* SPDX-License-Identifier: Apache-2.0
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { execSync } from 'node:child_process';
|
||||||
|
|
||||||
|
try {
|
||||||
|
const count = execSync(
|
||||||
|
'gh pr list --state open --limit 1000 --json number --jq length',
|
||||||
|
{
|
||||||
|
encoding: 'utf-8',
|
||||||
|
},
|
||||||
|
).trim();
|
||||||
|
console.log(`open_prs,${count}`);
|
||||||
|
} catch {
|
||||||
|
// Fallback if gh fails or no PRs found
|
||||||
|
console.log('open_prs,0');
|
||||||
|
}
|
||||||
@@ -0,0 +1,82 @@
|
|||||||
|
/**
|
||||||
|
* @license
|
||||||
|
* Copyright 2026 Google LLC
|
||||||
|
* SPDX-License-Identifier: Apache-2.0
|
||||||
|
*
|
||||||
|
* @license
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { GITHUB_OWNER, GITHUB_REPO, MetricOutput } from '../types.js';
|
||||||
|
import { execSync } from 'node:child_process';
|
||||||
|
|
||||||
|
try {
|
||||||
|
const query = `
|
||||||
|
query($owner: String!, $repo: String!) {
|
||||||
|
repository(owner: $owner, name: $repo) {
|
||||||
|
pullRequests(last: 100) {
|
||||||
|
nodes {
|
||||||
|
reviews(first: 50) {
|
||||||
|
nodes {
|
||||||
|
author { login }
|
||||||
|
authorAssociation
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
`;
|
||||||
|
const output = execSync(
|
||||||
|
`gh api graphql -F owner=${GITHUB_OWNER} -F repo=${GITHUB_REPO} -f query='${query}'`,
|
||||||
|
{ encoding: 'utf-8' },
|
||||||
|
);
|
||||||
|
const data = JSON.parse(output).data.repository;
|
||||||
|
|
||||||
|
const reviewCounts: Record<string, number> = {};
|
||||||
|
|
||||||
|
for (const pr of data.pullRequests.nodes) {
|
||||||
|
if (!pr.reviews?.nodes) continue;
|
||||||
|
// We only count one review per author per PR to avoid counting multiple review comments as multiple reviews
|
||||||
|
const reviewersOnPR = new Set<string>();
|
||||||
|
|
||||||
|
for (const review of pr.reviews.nodes) {
|
||||||
|
if (
|
||||||
|
['MEMBER', 'OWNER'].includes(review.authorAssociation) &&
|
||||||
|
review.author?.login
|
||||||
|
) {
|
||||||
|
const login = review.author.login.toLowerCase();
|
||||||
|
if (login.endsWith('[bot]') || login.includes('bot')) {
|
||||||
|
continue; // Ignore bots
|
||||||
|
}
|
||||||
|
reviewersOnPR.add(review.author.login);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const reviewer of reviewersOnPR) {
|
||||||
|
reviewCounts[reviewer] = (reviewCounts[reviewer] || 0) + 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const counts = Object.values(reviewCounts);
|
||||||
|
|
||||||
|
let variance = 0;
|
||||||
|
if (counts.length > 0) {
|
||||||
|
const mean = counts.reduce((a, b) => a + b, 0) / counts.length;
|
||||||
|
variance =
|
||||||
|
counts.reduce((a, b) => a + Math.pow(b - mean, 2), 0) / counts.length;
|
||||||
|
}
|
||||||
|
|
||||||
|
const timestamp = new Date().toISOString();
|
||||||
|
|
||||||
|
process.stdout.write(
|
||||||
|
JSON.stringify(<MetricOutput>{
|
||||||
|
metric: 'review_distribution_variance',
|
||||||
|
value: Math.round(variance * 100) / 100,
|
||||||
|
timestamp,
|
||||||
|
details: reviewCounts,
|
||||||
|
}) + '\n',
|
||||||
|
);
|
||||||
|
} catch (err) {
|
||||||
|
process.stderr.write(err instanceof Error ? err.message : String(err));
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
@@ -0,0 +1,148 @@
|
|||||||
|
/**
|
||||||
|
* @license
|
||||||
|
* Copyright 2026 Google LLC
|
||||||
|
* SPDX-License-Identifier: Apache-2.0
|
||||||
|
*
|
||||||
|
* @license
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { GITHUB_OWNER, GITHUB_REPO, MetricOutput } from '../types.js';
|
||||||
|
import { execSync } from 'node:child_process';
|
||||||
|
|
||||||
|
try {
|
||||||
|
const query = `
|
||||||
|
query($owner: String!, $repo: String!) {
|
||||||
|
repository(owner: $owner, name: $repo) {
|
||||||
|
pullRequests(last: 100, states: MERGED) {
|
||||||
|
nodes {
|
||||||
|
authorAssociation
|
||||||
|
mergedAt
|
||||||
|
}
|
||||||
|
}
|
||||||
|
issues(last: 100, states: CLOSED) {
|
||||||
|
nodes {
|
||||||
|
authorAssociation
|
||||||
|
closedAt
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
`;
|
||||||
|
const output = execSync(
|
||||||
|
`gh api graphql -F owner=${GITHUB_OWNER} -F repo=${GITHUB_REPO} -f query='${query}'`,
|
||||||
|
{ encoding: 'utf-8' },
|
||||||
|
);
|
||||||
|
const data = JSON.parse(output).data.repository;
|
||||||
|
|
||||||
|
const prs = data.pullRequests.nodes
|
||||||
|
.map((p: { authorAssociation: string; mergedAt: string }) => ({
|
||||||
|
association: p.authorAssociation,
|
||||||
|
date: new Date(p.mergedAt).getTime(),
|
||||||
|
}))
|
||||||
|
.sort((a: { date: number }, b: { date: number }) => a.date - b.date);
|
||||||
|
|
||||||
|
const issues = data.issues.nodes
|
||||||
|
.map((i: { authorAssociation: string; closedAt: string }) => ({
|
||||||
|
association: i.authorAssociation,
|
||||||
|
date: new Date(i.closedAt).getTime(),
|
||||||
|
}))
|
||||||
|
.sort((a: { date: number }, b: { date: number }) => a.date - b.date);
|
||||||
|
|
||||||
|
const isMaintainer = (assoc: string) =>
|
||||||
|
['MEMBER', 'OWNER', 'COLLABORATOR'].includes(assoc);
|
||||||
|
|
||||||
|
const calculateThroughput = (
|
||||||
|
items: { association: string; date: number }[],
|
||||||
|
) => {
|
||||||
|
if (items.length < 2) return 0;
|
||||||
|
const first = items[0].date;
|
||||||
|
const last = items[items.length - 1].date;
|
||||||
|
const days = (last - first) / (1000 * 60 * 60 * 24);
|
||||||
|
return days > 0 ? items.length / days : items.length; // items per day
|
||||||
|
};
|
||||||
|
|
||||||
|
const prOverall = calculateThroughput(prs);
|
||||||
|
const prMaintainers = calculateThroughput(
|
||||||
|
prs.filter((i: { association: string; date: number }) =>
|
||||||
|
isMaintainer(i.association),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
const prCommunity = calculateThroughput(
|
||||||
|
prs.filter(
|
||||||
|
(i: { association: string; date: number }) =>
|
||||||
|
!isMaintainer(i.association),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
const issueOverall = calculateThroughput(issues);
|
||||||
|
const issueMaintainers = calculateThroughput(
|
||||||
|
issues.filter((i: { association: string; date: number }) =>
|
||||||
|
isMaintainer(i.association),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
const issueCommunity = calculateThroughput(
|
||||||
|
issues.filter(
|
||||||
|
(i: { association: string; date: number }) =>
|
||||||
|
!isMaintainer(i.association),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
|
||||||
|
const timestamp = new Date().toISOString();
|
||||||
|
|
||||||
|
const metrics: MetricOutput[] = [
|
||||||
|
{
|
||||||
|
metric: 'throughput_pr_overall_per_day',
|
||||||
|
value: Math.round(prOverall * 100) / 100,
|
||||||
|
timestamp,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
metric: 'throughput_pr_maintainers_per_day',
|
||||||
|
value: Math.round(prMaintainers * 100) / 100,
|
||||||
|
timestamp,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
metric: 'throughput_pr_community_per_day',
|
||||||
|
value: Math.round(prCommunity * 100) / 100,
|
||||||
|
timestamp,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
metric: 'throughput_issue_overall_per_day',
|
||||||
|
value: Math.round(issueOverall * 100) / 100,
|
||||||
|
timestamp,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
metric: 'throughput_issue_maintainers_per_day',
|
||||||
|
value: Math.round(issueMaintainers * 100) / 100,
|
||||||
|
timestamp,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
metric: 'throughput_issue_community_per_day',
|
||||||
|
value: Math.round(issueCommunity * 100) / 100,
|
||||||
|
timestamp,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
metric: 'throughput_issue_overall_days_per_issue',
|
||||||
|
value: issueOverall > 0 ? Math.round((1 / issueOverall) * 100) / 100 : 0,
|
||||||
|
timestamp,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
metric: 'throughput_issue_maintainers_days_per_issue',
|
||||||
|
value:
|
||||||
|
issueMaintainers > 0
|
||||||
|
? Math.round((1 / issueMaintainers) * 100) / 100
|
||||||
|
: 0,
|
||||||
|
timestamp,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
metric: 'throughput_issue_community_days_per_issue',
|
||||||
|
value:
|
||||||
|
issueCommunity > 0 ? Math.round((1 / issueCommunity) * 100) / 100 : 0,
|
||||||
|
timestamp,
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
metrics.forEach((m) => process.stdout.write(JSON.stringify(m) + '\n'));
|
||||||
|
} catch (err) {
|
||||||
|
process.stderr.write(err instanceof Error ? err.message : String(err));
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
@@ -0,0 +1,157 @@
|
|||||||
|
/**
|
||||||
|
* @license
|
||||||
|
* Copyright 2026 Google LLC
|
||||||
|
* SPDX-License-Identifier: Apache-2.0
|
||||||
|
*
|
||||||
|
* @license
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { GITHUB_OWNER, GITHUB_REPO, MetricOutput } from '../types.js';
|
||||||
|
import { execSync } from 'node:child_process';
|
||||||
|
|
||||||
|
try {
|
||||||
|
const query = `
|
||||||
|
query($owner: String!, $repo: String!) {
|
||||||
|
repository(owner: $owner, name: $repo) {
|
||||||
|
pullRequests(last: 100) {
|
||||||
|
nodes {
|
||||||
|
authorAssociation
|
||||||
|
author { login }
|
||||||
|
createdAt
|
||||||
|
comments(first: 20) {
|
||||||
|
nodes {
|
||||||
|
author { login }
|
||||||
|
createdAt
|
||||||
|
}
|
||||||
|
}
|
||||||
|
reviews(first: 20) {
|
||||||
|
nodes {
|
||||||
|
author { login }
|
||||||
|
createdAt
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
issues(last: 100) {
|
||||||
|
nodes {
|
||||||
|
authorAssociation
|
||||||
|
author { login }
|
||||||
|
createdAt
|
||||||
|
comments(first: 20) {
|
||||||
|
nodes {
|
||||||
|
author { login }
|
||||||
|
createdAt
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
`;
|
||||||
|
const output = execSync(
|
||||||
|
`gh api graphql -F owner=${GITHUB_OWNER} -F repo=${GITHUB_REPO} -f query='${query}'`,
|
||||||
|
{ encoding: 'utf-8' },
|
||||||
|
);
|
||||||
|
const data = JSON.parse(output).data.repository;
|
||||||
|
|
||||||
|
const getFirstResponseTime = (item: {
|
||||||
|
createdAt: string;
|
||||||
|
author: { login: string };
|
||||||
|
comments: { nodes: { createdAt: string; author?: { login: string } }[] };
|
||||||
|
reviews?: { nodes: { createdAt: string; author?: { login: string } }[] };
|
||||||
|
}) => {
|
||||||
|
const authorLogin = item.author?.login;
|
||||||
|
let earliestResponse: number | null = null;
|
||||||
|
|
||||||
|
const checkNodes = (
|
||||||
|
nodes: { createdAt: string; author?: { login: string } }[],
|
||||||
|
) => {
|
||||||
|
for (const node of nodes) {
|
||||||
|
if (node.author?.login && node.author.login !== authorLogin) {
|
||||||
|
const login = node.author.login.toLowerCase();
|
||||||
|
if (login.endsWith('[bot]') || login.includes('bot')) {
|
||||||
|
continue; // Ignore bots
|
||||||
|
}
|
||||||
|
const time = new Date(node.createdAt).getTime();
|
||||||
|
if (!earliestResponse || time < earliestResponse) {
|
||||||
|
earliestResponse = time;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
if (item.comments?.nodes) checkNodes(item.comments.nodes);
|
||||||
|
if (item.reviews?.nodes) checkNodes(item.reviews.nodes);
|
||||||
|
|
||||||
|
if (earliestResponse) {
|
||||||
|
return (
|
||||||
|
(earliestResponse - new Date(item.createdAt).getTime()) /
|
||||||
|
(1000 * 60 * 60)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return null; // No response yet
|
||||||
|
};
|
||||||
|
const processItems = (
|
||||||
|
items: {
|
||||||
|
authorAssociation: string;
|
||||||
|
createdAt: string;
|
||||||
|
author: { login: string };
|
||||||
|
comments: {
|
||||||
|
nodes: { createdAt: string; author?: { login: string } }[];
|
||||||
|
};
|
||||||
|
reviews?: {
|
||||||
|
nodes: { createdAt: string; author?: { login: string } }[];
|
||||||
|
};
|
||||||
|
}[],
|
||||||
|
) => {
|
||||||
|
return items
|
||||||
|
.map((item) => ({
|
||||||
|
association: item.authorAssociation,
|
||||||
|
ttfr: getFirstResponseTime(item),
|
||||||
|
}))
|
||||||
|
.filter((i) => i.ttfr !== null) as {
|
||||||
|
association: string;
|
||||||
|
ttfr: number;
|
||||||
|
}[];
|
||||||
|
};
|
||||||
|
const prs = processItems(data.pullRequests.nodes);
|
||||||
|
const issues = processItems(data.issues.nodes);
|
||||||
|
const allItems = [...prs, ...issues];
|
||||||
|
|
||||||
|
const isMaintainer = (assoc: string) => ['MEMBER', 'OWNER'].includes(assoc);
|
||||||
|
const is1P = (assoc: string) => ['COLLABORATOR'].includes(assoc);
|
||||||
|
|
||||||
|
const calculateAvg = (items: { ttfr: number; association: string }[]) =>
|
||||||
|
items.length ? items.reduce((a, b) => a + b.ttfr, 0) / items.length : 0;
|
||||||
|
|
||||||
|
const maintainers = calculateAvg(
|
||||||
|
allItems.filter((i) => isMaintainer(i.association)),
|
||||||
|
);
|
||||||
|
const firstParty = calculateAvg(allItems.filter((i) => is1P(i.association)));
|
||||||
|
const overall = calculateAvg(allItems);
|
||||||
|
|
||||||
|
const timestamp = new Date().toISOString();
|
||||||
|
|
||||||
|
const metrics: MetricOutput[] = [
|
||||||
|
{
|
||||||
|
metric: 'time_to_first_response_overall_hours',
|
||||||
|
value: Math.round(overall * 100) / 100,
|
||||||
|
timestamp,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
metric: 'time_to_first_response_maintainers_hours',
|
||||||
|
value: Math.round(maintainers * 100) / 100,
|
||||||
|
timestamp,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
metric: 'time_to_first_response_1p_hours',
|
||||||
|
value: Math.round(firstParty * 100) / 100,
|
||||||
|
timestamp,
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
metrics.forEach((m) => process.stdout.write(JSON.stringify(m) + '\n'));
|
||||||
|
} catch (err) {
|
||||||
|
process.stderr.write(err instanceof Error ? err.message : String(err));
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
@@ -0,0 +1,100 @@
|
|||||||
|
/**
|
||||||
|
* @license
|
||||||
|
* Copyright 2026 Google LLC
|
||||||
|
* SPDX-License-Identifier: Apache-2.0
|
||||||
|
*
|
||||||
|
* @license
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { GITHUB_OWNER, GITHUB_REPO, type MetricOutput } from '../types.js';
|
||||||
|
import { execSync } from 'node:child_process';
|
||||||
|
|
||||||
|
try {
|
||||||
|
const query = `
|
||||||
|
query($owner: String!, $repo: String!) {
|
||||||
|
repository(owner: $owner, name: $repo) {
|
||||||
|
pullRequests(last: 100, states: MERGED) {
|
||||||
|
nodes {
|
||||||
|
authorAssociation
|
||||||
|
comments { totalCount }
|
||||||
|
reviews { totalCount }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
issues(last: 100, states: CLOSED) {
|
||||||
|
nodes {
|
||||||
|
authorAssociation
|
||||||
|
comments { totalCount }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
`;
|
||||||
|
const output = execSync(
|
||||||
|
`gh api graphql -F owner=${GITHUB_OWNER} -F repo=${GITHUB_REPO} -f query='${query}'`,
|
||||||
|
{ encoding: 'utf-8' },
|
||||||
|
);
|
||||||
|
const data = JSON.parse(output).data.repository;
|
||||||
|
|
||||||
|
const prs = data.pullRequests.nodes;
|
||||||
|
const issues = data.issues.nodes;
|
||||||
|
|
||||||
|
const allItems = [
|
||||||
|
...prs.map(
|
||||||
|
(p: {
|
||||||
|
authorAssociation: string;
|
||||||
|
comments: { totalCount: number };
|
||||||
|
reviews?: { totalCount: number };
|
||||||
|
}) => ({
|
||||||
|
association: p.authorAssociation,
|
||||||
|
touches: p.comments.totalCount + (p.reviews ? p.reviews.totalCount : 0),
|
||||||
|
}),
|
||||||
|
),
|
||||||
|
...issues.map(
|
||||||
|
(i: { authorAssociation: string; comments: { totalCount: number } }) => ({
|
||||||
|
association: i.authorAssociation,
|
||||||
|
touches: i.comments.totalCount,
|
||||||
|
}),
|
||||||
|
),
|
||||||
|
];
|
||||||
|
|
||||||
|
const isMaintainer = (assoc: string) =>
|
||||||
|
['MEMBER', 'OWNER', 'COLLABORATOR'].includes(assoc);
|
||||||
|
|
||||||
|
const calculateAvg = (items: { touches: number; association: string }[]) =>
|
||||||
|
items.length ? items.reduce((a, b) => a + b.touches, 0) / items.length : 0;
|
||||||
|
|
||||||
|
const overall = calculateAvg(allItems);
|
||||||
|
const maintainers = calculateAvg(
|
||||||
|
allItems.filter((i) => isMaintainer(i.association)),
|
||||||
|
);
|
||||||
|
const community = calculateAvg(
|
||||||
|
allItems.filter((i) => !isMaintainer(i.association)),
|
||||||
|
);
|
||||||
|
|
||||||
|
const timestamp = new Date().toISOString();
|
||||||
|
|
||||||
|
process.stdout.write(
|
||||||
|
JSON.stringify(<MetricOutput>{
|
||||||
|
metric: 'user_touches_overall',
|
||||||
|
value: Math.round(overall * 100) / 100,
|
||||||
|
timestamp,
|
||||||
|
}) + '\n',
|
||||||
|
);
|
||||||
|
process.stdout.write(
|
||||||
|
JSON.stringify(<MetricOutput>{
|
||||||
|
metric: 'user_touches_maintainers',
|
||||||
|
value: Math.round(maintainers * 100) / 100,
|
||||||
|
timestamp,
|
||||||
|
}) + '\n',
|
||||||
|
);
|
||||||
|
process.stdout.write(
|
||||||
|
JSON.stringify(<MetricOutput>{
|
||||||
|
metric: 'user_touches_community',
|
||||||
|
value: Math.round(community * 100) / 100,
|
||||||
|
timestamp,
|
||||||
|
}) + '\n',
|
||||||
|
);
|
||||||
|
} catch (err) {
|
||||||
|
process.stderr.write(err instanceof Error ? err.message : String(err));
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
@@ -0,0 +1,14 @@
|
|||||||
|
/**
 * @license
 * Copyright 2026 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

/**
 * Shape of a single metric record emitted (one JSON object per line on
 * stdout) by the metric-collection scripts.
 */
export interface MetricOutput {
  /** Metric identifier, e.g. 'time_to_first_response_overall_hours'. */
  metric: string;
  /**
   * Metric reading. The scripts visible alongside this type emit numbers
   * rounded to two decimals; string values are also permitted.
   */
  value: number | string;
  /** ISO-8601 timestamp of when the metric was computed. */
  timestamp: string;
  /** Optional free-form additional context for the metric. */
  details?: Record<string, unknown>;
}

/** GitHub organization/owner the metric scripts query. */
export const GITHUB_OWNER = 'google-gemini';
/** GitHub repository name the metric scripts query. */
export const GITHUB_REPO = 'gemini-cli';
|
||||||
Reference in New Issue
Block a user