mirror of
https://github.com/google-gemini/gemini-cli.git
synced 2026-05-15 06:12:50 -07:00
## Description
Fixes the throughput metrics script and introduces new visibility into backlog bottlenecks and priority distribution.

### Changes

- **Throughput Fixes**: Resolved a `ReferenceError` where `isMaintainer` was not correctly scoped, fixed a malformed license header, and added a new metric, `issue_arrival_rate_per_day`, to enable growth-vs-closure analysis.
- **Backlog Bottlenecks**: Introduced `bottlenecks.ts` to identify "Zombie" issues (no activity for more than 30 days) and "Hot" issues (high recent activity).
- **Priority Distribution**: Introduced `priority_distribution.ts` to track the count of open issues by priority level (P0–P3).

### Impact

These metrics will provide the data needed to confirm whether the repository is experiencing systemic backlog growth (arrival rate exceeding throughput) and to identify which segments of the backlog require urgent triage.
This commit is contained in:
@@ -0,0 +1,101 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2026 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { GITHUB_OWNER, GITHUB_REPO } from '../types.js';
|
||||
import { execSync } from 'node:child_process';
|
||||
|
||||
/**
 * Minimal shape of an issue node returned by the GraphQL query below:
 * just enough to compute staleness and comment activity.
 */
interface IssueNode {
  // Issue number (fetched by the query; not used by the current metrics).
  number: number;
  // Last-activity timestamp string; parsed with `new Date(...)` to
  // compute how long the issue has been idle.
  updatedAt: string;
  comments: {
    // Total comment count; used to flag "Hot" issues.
    totalCount: number;
  };
}
|
||||
|
||||
/**
|
||||
* Identifies "Zombie" issues (open issues with no activity for > 30 days).
|
||||
*/
|
||||
function run() {
|
||||
try {
|
||||
// Fetch 100 open issues, sorted by least recently updated.
|
||||
const query = `
|
||||
query($owner: String!, $repo: String!) {
|
||||
repository(owner: $owner, name: $repo) {
|
||||
issues(first: 100, states: OPEN, orderBy: {field: UPDATED_AT, direction: ASC}) {
|
||||
nodes {
|
||||
number
|
||||
updatedAt
|
||||
comments {
|
||||
totalCount
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
const output = execSync(
|
||||
`gh api graphql -F owner=${GITHUB_OWNER} -F repo=${GITHUB_REPO} -f query='${query}'`,
|
||||
{ encoding: 'utf-8', stdio: ['ignore', 'pipe', 'ignore'] },
|
||||
).trim();
|
||||
const data = JSON.parse(output).data.repository;
|
||||
const issues: IssueNode[] = data.issues.nodes;
|
||||
|
||||
if (issues.length === 0) {
|
||||
process.stdout.write('bottleneck_zombie_issues_count,0\n');
|
||||
return;
|
||||
}
|
||||
|
||||
const now = new Date().getTime();
|
||||
const thirtyDaysAgo = now - 30 * 24 * 60 * 60 * 1000;
|
||||
|
||||
const zombies = issues.filter((issue) => {
|
||||
const updated = new Date(issue.updatedAt).getTime();
|
||||
return updated < thirtyDaysAgo;
|
||||
});
|
||||
|
||||
process.stdout.write(`bottleneck_zombie_issues_count,${zombies.length}\n`);
|
||||
|
||||
// Also identify "Hot" issues in the same sample (though less likely to find them in the 'oldest' sample)
|
||||
// But we can also fetch 'most recently updated' to find Hot issues.
|
||||
const hotQuery = `
|
||||
query($owner: String!, $repo: String!) {
|
||||
repository(owner: $owner, name: $repo) {
|
||||
issues(last: 100, states: OPEN, orderBy: {field: UPDATED_AT, direction: ASC}) {
|
||||
nodes {
|
||||
number
|
||||
updatedAt
|
||||
comments {
|
||||
totalCount
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
const hotOutput = execSync(
|
||||
`gh api graphql -F owner=${GITHUB_OWNER} -F repo=${GITHUB_REPO} -f query='${hotQuery}'`,
|
||||
{ encoding: 'utf-8', stdio: ['ignore', 'pipe', 'ignore'] },
|
||||
).trim();
|
||||
const hotData = JSON.parse(hotOutput).data.repository;
|
||||
const hotIssues: IssueNode[] = hotData.issues.nodes;
|
||||
|
||||
const sevenDaysAgo = now - 7 * 24 * 60 * 60 * 1000;
|
||||
const veryHot = hotIssues.filter((issue) => {
|
||||
const updated = new Date(issue.updatedAt).getTime();
|
||||
return updated > sevenDaysAgo && issue.comments.totalCount > 10;
|
||||
});
|
||||
|
||||
process.stdout.write(`bottleneck_hot_issues_count,${veryHot.length}\n`);
|
||||
|
||||
} catch (error) {
|
||||
process.stderr.write(
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
// Entry point: emit the bottleneck metrics when the script is executed.
run();
|
||||
@@ -0,0 +1,90 @@
|
||||
/**
|
||||
* @license
|
||||
* Copyright 2026 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
*/
|
||||
|
||||
import { GITHUB_OWNER, GITHUB_REPO } from '../types.js';
|
||||
import { execSync } from 'node:child_process';
|
||||
|
||||
/**
 * Minimal shape of an issue node returned by the GraphQL query below:
 * only the label names are needed for priority bucketing.
 */
interface IssueNode {
  labels: {
    // Label names attached to the issue (the query fetches at most 20).
    nodes: Array<{ name: string }>;
  };
}
|
||||
|
||||
/**
|
||||
* Calculates the distribution of open issues across priority labels.
|
||||
*/
|
||||
function run() {
|
||||
try {
|
||||
// Fetch last 100 open issues and their labels.
|
||||
// Using 'last' to get more recent context, but distribution is better from a larger sample.
|
||||
const query = `
|
||||
query($owner: String!, $repo: String!) {
|
||||
repository(owner: $owner, name: $repo) {
|
||||
issues(last: 100, states: OPEN) {
|
||||
nodes {
|
||||
labels(first: 20) {
|
||||
nodes {
|
||||
name
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
const output = execSync(
|
||||
`gh api graphql -F owner=${GITHUB_OWNER} -F repo=${GITHUB_REPO} -f query='${query}'`,
|
||||
{ encoding: 'utf-8', stdio: ['ignore', 'pipe', 'ignore'] },
|
||||
).trim();
|
||||
const data = JSON.parse(output).data.repository;
|
||||
const issues: IssueNode[] = data.issues.nodes;
|
||||
|
||||
const distribution: Record<string, number> = {
|
||||
p0: 0,
|
||||
p1: 0,
|
||||
p2: 0,
|
||||
p3: 0,
|
||||
other: 0,
|
||||
};
|
||||
|
||||
issues.forEach((issue) => {
|
||||
let found = false;
|
||||
issue.labels.nodes.forEach((label) => {
|
||||
const name = label.name.toLowerCase();
|
||||
if (name.includes('p0')) {
|
||||
distribution.p0++;
|
||||
found = true;
|
||||
} else if (name.includes('p1')) {
|
||||
distribution.p1++;
|
||||
found = true;
|
||||
} else if (name.includes('p2')) {
|
||||
distribution.p2++;
|
||||
found = true;
|
||||
} else if (name.includes('p3')) {
|
||||
distribution.p3++;
|
||||
found = true;
|
||||
}
|
||||
});
|
||||
if (!found) {
|
||||
distribution.other++;
|
||||
}
|
||||
});
|
||||
|
||||
process.stdout.write(`priority_p0_count,${distribution.p0}\n`);
|
||||
process.stdout.write(`priority_p1_count,${distribution.p1}\n`);
|
||||
process.stdout.write(`priority_p2_count,${distribution.p2}\n`);
|
||||
process.stdout.write(`priority_p3_count,${distribution.p3}\n`);
|
||||
process.stdout.write(`priority_none_count,${distribution.other}\n`);
|
||||
|
||||
} catch (error) {
|
||||
process.stderr.write(
|
||||
error instanceof Error ? error.message : String(error),
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
// Entry point: emit the priority-distribution metrics when executed.
run();
|
||||
@@ -2,13 +2,33 @@
|
||||
* @license
|
||||
* Copyright 2026 Google LLC
|
||||
* SPDX-License-Identifier: Apache-2.0
|
||||
 */
|
||||
|
||||
import { GITHUB_OWNER, GITHUB_REPO } from '../types.js';
|
||||
import { execSync } from 'node:child_process';
|
||||
|
||||
/**
|
||||
* Checks if the author association belongs to a maintainer.
|
||||
*/
|
||||
const isMaintainer = (assoc: string) =>
|
||||
['MEMBER', 'OWNER', 'COLLABORATOR'].includes(assoc);
|
||||
|
||||
interface Item {
|
||||
association: string;
|
||||
date: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculates items per day over the sample period.
|
||||
*/
|
||||
const calculateThroughput = (items: Item[]) => {
|
||||
if (items.length < 2) return 0;
|
||||
const first = items[0].date;
|
||||
const last = items[items.length - 1].date;
|
||||
const days = (last - first) / (1000 * 60 * 60 * 24);
|
||||
return days > 0 ? items.length / days : items.length;
|
||||
};
|
||||
|
||||
try {
|
||||
const query = `
|
||||
query($owner: String!, $repo: String!) {
|
||||
@@ -25,68 +45,64 @@ try {
|
||||
closedAt
|
||||
}
|
||||
}
|
||||
arrival: issues(last: 100) {
|
||||
nodes {
|
||||
createdAt
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`;
|
||||
const output = execSync(
|
||||
`gh api graphql -F owner=${GITHUB_OWNER} -F repo=${GITHUB_REPO} -f query='${query}'`,
|
||||
{ encoding: 'utf-8' },
|
||||
);
|
||||
{ encoding: 'utf-8', stdio: ['ignore', 'pipe', 'ignore'] },
|
||||
).trim();
|
||||
const data = JSON.parse(output).data.repository;
|
||||
|
||||
const prs = data.pullRequests.nodes
|
||||
const prs: Item[] = data.pullRequests.nodes
|
||||
.map((p: { authorAssociation: string; mergedAt: string }) => ({
|
||||
association: p.authorAssociation,
|
||||
date: new Date(p.mergedAt).getTime(),
|
||||
}))
|
||||
.sort((a: { date: number }, b: { date: number }) => a.date - b.date);
|
||||
.sort((a: Item, b: Item) => a.date - b.date);
|
||||
|
||||
const issues = data.issues.nodes
|
||||
const issues: Item[] = data.issues.nodes
|
||||
.map((i: { authorAssociation: string; closedAt: string }) => ({
|
||||
association: i.authorAssociation,
|
||||
date: new Date(i.closedAt).getTime(),
|
||||
}))
|
||||
.sort((a: { date: number }, b: { date: number }) => a.date - b.date);
|
||||
.sort((a: Item, b: Item) => a.date - b.date);
|
||||
|
||||
const isMaintainer = (assoc: string) =>
|
||||
['MEMBER', 'OWNER', 'COLLABORATOR'].includes(assoc);
|
||||
const arrivalDates = data.arrival.nodes
|
||||
.map((i: { createdAt: string }) => new Date(i.createdAt).getTime())
|
||||
.sort((a: number, b: number) => a - b);
|
||||
|
||||
const calculateThroughput = (
|
||||
items: { association: string; date: number }[],
|
||||
) => {
|
||||
if (items.length < 2) return 0;
|
||||
const first = items[0].date;
|
||||
const last = items[items.length - 1].date;
|
||||
const calculateArrivalRate = (dates: number[]) => {
|
||||
if (dates.length < 2) return 0;
|
||||
const first = dates[0];
|
||||
const last = dates[dates.length - 1];
|
||||
const days = (last - first) / (1000 * 60 * 60 * 24);
|
||||
return days > 0 ? items.length / days : items.length; // items per day
|
||||
return days > 0 ? dates.length / days : dates.length;
|
||||
};
|
||||
|
||||
const prOverall = calculateThroughput(prs);
|
||||
const prMaintainers = calculateThroughput(
|
||||
prs.filter((i: { association: string; date: number }) =>
|
||||
isMaintainer(i.association),
|
||||
),
|
||||
prs.filter((i) => isMaintainer(i.association)),
|
||||
);
|
||||
const prCommunity = calculateThroughput(
|
||||
prs.filter(
|
||||
(i: { association: string; date: number }) =>
|
||||
!isMaintainer(i.association),
|
||||
),
|
||||
prs.filter((i) => !isMaintainer(i.association)),
|
||||
);
|
||||
|
||||
const issueOverall = calculateThroughput(issues);
|
||||
const issueMaintainers = calculateThroughput(
|
||||
issues.filter((i: { association: string; date: number }) =>
|
||||
isMaintainer(i.association),
|
||||
),
|
||||
issues.filter((i) => isMaintainer(i.association)),
|
||||
);
|
||||
const issueCommunity = calculateThroughput(
|
||||
issues.filter(
|
||||
(i: { association: string; date: number }) =>
|
||||
!isMaintainer(i.association),
|
||||
),
|
||||
issues.filter((i) => !isMaintainer(i.association)),
|
||||
);
|
||||
|
||||
const arrivalRate = calculateArrivalRate(arrivalDates);
|
||||
|
||||
process.stdout.write(
|
||||
`throughput_pr_overall_per_day,${Math.round(prOverall * 100) / 100}\n`,
|
||||
);
|
||||
@@ -105,6 +121,9 @@ try {
|
||||
process.stdout.write(
|
||||
`throughput_issue_community_per_day,${Math.round(issueCommunity * 100) / 100}\n`,
|
||||
);
|
||||
process.stdout.write(
|
||||
`throughput_issue_arrival_rate_per_day,${Math.round(arrivalRate * 100) / 100}\n`,
|
||||
);
|
||||
process.stdout.write(
|
||||
`throughput_issue_overall_days_per_issue,${issueOverall > 0 ? Math.round((1 / issueOverall) * 100) / 100 : 0}\n`,
|
||||
);
|
||||
|
||||
Reference in New Issue
Block a user