# Description

Fix Metrics History Retention & Accuracy

This PR addresses several issues identified in the repository's metrics collection and analysis pipeline:

1.  **Increased History Retention**: Raised the rolling window in `metrics/index.ts` from 100 rows to 5000. At roughly 65 metric rows per run (implied by the old limit holding only ~1.5 runs of data), 100 rows made meaningful delta calculations impossible; 5000 rows retains on the order of 75 runs (see the delta sketch below).
2.  **Restored Backlog Age Metric**: Re-introduced `backlog_age.ts` to calculate the average age of the oldest 100 open issues. This provides visibility into backlog stagnation.
3.  **Enhanced Throughput Accuracy**: Updated `throughput.ts` to distinguish between items **authored** by maintainers and items **processed** (merged/closed) by maintainers. This gives a more accurate measure of maintainer velocity.
4.  **Added Bottleneck Analysis**: Introduced `bottlenecks.ts` to sample open PRs and classify whether each is likely waiting on a maintainer review or on an author response.

These changes will significantly improve the "Brain's" ability to identify trends and bottlenecks in the repository workflow.
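As a rough illustration of what the longer window enables, here is a minimal sketch of a 7-/30-day delta computation over the timeseries. It assumes rows of the form `timestamp,metric,value` (an inference from the collectors' `name,value` output and the header handling in the first hunk below; the exact schema is not shown in this diff):

```ts
// Hypothetical delta helper: latest value of a metric minus the value
// recorded closest to `days` ago, over rows shaped `timestamp,metric,value`.
function delta(rows: string[], metric: string, days: number): number | null {
  const points = rows
    .map((row) => row.split(','))
    .filter(([, name]) => name === metric)
    .map(([ts, , value]) => ({ t: new Date(ts).getTime(), v: Number(value) }));
  if (points.length < 2) return null;
  const latest = points[points.length - 1]; // rows are appended chronologically
  const target = latest.t - days * 86_400_000;
  // Pick the historical point closest to the target timestamp.
  const past = points.reduce((best, p) =>
    Math.abs(p.t - target) < Math.abs(best.t - target) ? p : best,
  );
  return latest.v - past.v;
}
```

With only ~1.5 runs of history, `past` and `latest` were effectively the same run; with ~75 runs retained, a 7-day or 30-day lookback actually lands on distinct data.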

# Impact
- More accurate 7-day and 30-day deltas for all metrics.
- Better visibility into maintainer workload and backlog health.
- Data-driven identification of whether bottlenecks are due to maintainer capacity or author responsiveness.
---

**Commit** `6ec068c2ab` · parent `b14a29efa2` · gemini-cli[bot] · 2026-05-01 18:51:31 +00:00 · 4 changed files with 182 additions and 20 deletions
**`metrics/index.ts`** (+4 −4)

```diff
@@ -133,7 +133,7 @@ async function run() {
   writeFileSync(OUTPUT_FILE, results.join('\n'));
   console.log(`Saved metrics to ${OUTPUT_FILE}`);
-  // Update timeseries with rolling window (keep last 100 lines)
+  // Update timeseries with rolling window (keep last 5000 lines)
   const timestamp = new Date().toISOString();
   let timeseriesLines: string[] = [];
   if (existsSync(TIMESERIES_FILE)) {
@@ -146,10 +146,10 @@ async function run() {
   if (newRows.length > 0) {
     timeseriesLines.push(...newRows);
-    // Keep header + last 100 data rows
-    if (timeseriesLines.length > 101) {
+    // Keep header + last 5000 data rows
+    if (timeseriesLines.length > 5001) {
       const header = timeseriesLines[0];
-      timeseriesLines = [header, ...timeseriesLines.slice(-100)];
+      timeseriesLines = [header, ...timeseriesLines.slice(-5000)];
     }
     writeFileSync(TIMESERIES_FILE, timeseriesLines.join('\n') + '\n');
```
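The trim keeps the CSV header plus at most 5000 data rows. The same invariant in isolation, as a hypothetical helper (not part of this PR):

```ts
// Header-preserving rolling-window trim: keep line 0 (the CSV header)
// plus at most `keep` most-recent data rows.
function trimTimeseries(lines: string[], keep = 5000): string[] {
  if (lines.length <= keep + 1) return lines;
  return [lines[0], ...lines.slice(-keep)];
}

// With keep = 5: the header survives and only row2..row6 remain.
const sample = ['timestamp,metric,value', 'row1', 'row2', 'row3', 'row4', 'row5', 'row6'];
console.log(trimTimeseries(sample, 5));
```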
**`backlog_age.ts`** (new file, +48)

```ts
/**
 * @license
 * Copyright 2026 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { GITHUB_OWNER, GITHUB_REPO } from '../types.js';
import { execSync } from 'node:child_process';

try {
  const query = `
    query($owner: String!, $repo: String!) {
      repository(owner: $owner, name: $repo) {
        issues(states: OPEN, first: 100, orderBy: {field: CREATED_AT, direction: ASC}) {
          nodes {
            createdAt
          }
        }
      }
    }
  `;
  const output = execSync(
    `gh api graphql -F owner=${GITHUB_OWNER} -F repo=${GITHUB_REPO} -f query='${query}'`,
    { encoding: 'utf-8' },
  );
  const data = JSON.parse(output).data.repository;
  const issues = data.issues.nodes;
  if (issues.length === 0) {
    process.stdout.write(`backlog_age_days,0\n`);
  } else {
    const now = new Date().getTime();
    const totalAge = issues.reduce(
      (acc: number, issue: { createdAt: string }) => {
        const created = new Date(issue.createdAt).getTime();
        return acc + (now - created);
      },
      0,
    );
    const avgAgeDays = totalAge / issues.length / (1000 * 60 * 60 * 24);
    process.stdout.write(
      `backlog_age_days,${Math.round(avgAgeDays * 100) / 100}\n`,
    );
  }
} catch (err) {
  process.stderr.write(err instanceof Error ? err.message : String(err));
  process.exit(1);
}
```
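The collector emits a single CSV line such as `backlog_age_days,42.37`. The averaging itself, isolated as a hypothetical helper with a tiny worked example:

```ts
// Average age in days of a list of ISO-8601 creation timestamps,
// rounded to two decimals (mirrors the reduce above; hypothetical helper).
function averageAgeDays(createdAts: string[], now = Date.now()): number {
  if (createdAts.length === 0) return 0;
  const totalMs = createdAts.reduce(
    (acc, iso) => acc + (now - new Date(iso).getTime()),
    0,
  );
  return Math.round((totalMs / createdAts.length / 86_400_000) * 100) / 100;
}

// Issues opened 10 and 30 days ago average out to 20 days.
const day = 86_400_000;
const now = Date.now();
console.log(
  averageAgeDays(
    [new Date(now - 10 * day).toISOString(), new Date(now - 30 * day).toISOString()],
    now,
  ),
); // 20
```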
**`bottlenecks.ts`** (new file, +74)

```ts
/**
 * @license
 * Copyright 2026 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

import { GITHUB_OWNER, GITHUB_REPO } from '../types.js';
import { execSync } from 'node:child_process';

try {
  const query = `
    query($owner: String!, $repo: String!) {
      repository(owner: $owner, name: $repo) {
        pullRequests(states: OPEN, last: 50) {
          nodes {
            author { login }
            timelineItems(last: 10, itemTypes: [ISSUE_COMMENT, PULL_REQUEST_REVIEW, PULL_REQUEST_REVIEW_COMMENT]) {
              nodes {
                ... on IssueComment { author { login } createdAt }
                ... on PullRequestReview { author { login } createdAt }
                ... on PullRequestReviewComment { author { login } createdAt }
              }
            }
          }
        }
      }
    }
  `;
  const output = execSync(
    `gh api graphql -F owner=${GITHUB_OWNER} -F repo=${GITHUB_REPO} -f query='${query}'`,
    { encoding: 'utf-8' },
  );
  const data = JSON.parse(output).data.repository;
  const prs = data.pullRequests.nodes;
  let waitingOnMaintainer = 0;
  let waitingOnAuthor = 0;
  for (const pr of prs) {
    const author = pr.author?.login;
    if (!author) continue;
    const items = pr.timelineItems.nodes as {
      author: { login: string };
      createdAt: string;
    }[];
    if (items.length === 0) {
      waitingOnMaintainer++;
      continue;
    }
    // Sort by createdAt just in case
    items.sort(
      (a, b) =>
        new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime(),
    );
    const lastItem = items[items.length - 1];
    const lastActor = lastItem.author?.login;
    if (lastActor === author) {
      waitingOnMaintainer++;
    } else {
      waitingOnAuthor++;
    }
  }
  process.stdout.write(
    `prs_waiting_on_maintainer_sample,${waitingOnMaintainer}\n`,
  );
  process.stdout.write(`prs_waiting_on_author_sample,${waitingOnAuthor}\n`);
} catch (err) {
  process.stderr.write(err instanceof Error ? err.message : String(err));
  process.exit(1);
}
```
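The classification rule: if a PR has no recent review activity, or the most recent comment/review came from the PR's own author, it is waiting on a maintainer; otherwise the ball is with the author. The same heuristic isolated as a hypothetical helper:

```ts
// Hypothetical standalone version of the heuristic above. "maintainer"
// means the last word was the author's (or there was no activity), so a
// reviewer owes a response; "author" means a reviewer spoke last.
type TimelineItem = { author: { login: string } | null; createdAt: string };

function waitingOn(prAuthor: string, items: TimelineItem[]): 'maintainer' | 'author' {
  if (items.length === 0) return 'maintainer';
  const last = [...items].sort(
    (a, b) => new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime(),
  )[items.length - 1];
  return last.author?.login === prAuthor ? 'maintainer' : 'author';
}

console.log(
  waitingOn('alice', [
    { author: { login: 'bob' }, createdAt: '2026-04-29T09:00:00Z' },
    { author: { login: 'alice' }, createdAt: '2026-04-30T12:00:00Z' },
  ]),
); // "maintainer" — alice (the author) replied last
```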
**`throughput.ts`** (+56 −16)

```diff
@@ -17,12 +17,17 @@ try {
         nodes {
           authorAssociation
           mergedAt
+          mergedBy { login }
         }
       }
       issues(last: 100, states: CLOSED) {
         nodes {
           authorAssociation
           closedAt
+          closedBy {
+            ... on User { login }
+            ... on Bot { login }
+          }
         }
       }
     }
@@ -35,17 +40,31 @@ try {
   const data = JSON.parse(output).data.repository;
   const prs = data.pullRequests.nodes
-    .map((p: { authorAssociation: string; mergedAt: string }) => ({
-      association: p.authorAssociation,
-      date: new Date(p.mergedAt).getTime(),
-    }))
+    .map(
+      (p: {
+        authorAssociation: string;
+        mergedAt: string;
+        mergedBy: { login: string };
+      }) => ({
+        association: p.authorAssociation,
+        date: new Date(p.mergedAt).getTime(),
+        mergedBy: p.mergedBy?.login,
+      }),
+    )
     .sort((a: { date: number }, b: { date: number }) => a.date - b.date);
   const issues = data.issues.nodes
-    .map((i: { authorAssociation: string; closedAt: string }) => ({
-      association: i.authorAssociation,
-      date: new Date(i.closedAt).getTime(),
-    }))
+    .map(
+      (i: {
+        authorAssociation: string;
+        closedAt: string;
+        closedBy: { login: string };
+      }) => ({
+        association: i.authorAssociation,
+        date: new Date(i.closedAt).getTime(),
+        closedBy: i.closedBy?.login,
+      }),
+    )
     .sort((a: { date: number }, b: { date: number }) => a.date - b.date);
   const isMaintainer = (assoc: string) =>
@@ -62,12 +81,12 @@ try {
   };
   const prOverall = calculateThroughput(prs);
-  const prMaintainers = calculateThroughput(
+  const prMaintainerAuthored = calculateThroughput(
     prs.filter((i: { association: string; date: number }) =>
       isMaintainer(i.association),
     ),
   );
-  const prCommunity = calculateThroughput(
+  const prCommunityAuthored = calculateThroughput(
     prs.filter(
       (i: { association: string; date: number }) =>
         !isMaintainer(i.association),
@@ -75,35 +94,56 @@ try {
   );
   const issueOverall = calculateThroughput(issues);
-  const issueMaintainers = calculateThroughput(
+  const issueMaintainerAuthored = calculateThroughput(
     issues.filter((i: { association: string; date: number }) =>
       isMaintainer(i.association),
     ),
   );
-  const issueCommunity = calculateThroughput(
+  const issueCommunityAuthored = calculateThroughput(
     issues.filter(
       (i: { association: string; date: number }) =>
         !isMaintainer(i.association),
     ),
   );
+  const prMaintainerMerges = calculateThroughput(
+    prs.filter((i: { mergedBy: string }) => {
+      // This is a bit of a hack since we don't have the list of maintainers here,
+      // but we can assume if they merged it, they are likely a maintainer
+      // or at least have merge permissions.
+      return i.mergedBy && !i.mergedBy.toLowerCase().includes('bot');
+    }),
+  );
+  const issueMaintainerCloses = calculateThroughput(
+    issues.filter((i: { closedBy: string }) => {
+      return i.closedBy && !i.closedBy.toLowerCase().includes('bot');
+    }),
+  );
   process.stdout.write(
     `throughput_pr_overall_per_day,${Math.round(prOverall * 100) / 100}\n`,
   );
   process.stdout.write(
-    `throughput_pr_maintainers_per_day,${Math.round(prMaintainers * 100) / 100}\n`,
+    `throughput_pr_maintainers_authored_per_day,${Math.round(prMaintainerAuthored * 100) / 100}\n`,
   );
   process.stdout.write(
-    `throughput_pr_community_per_day,${Math.round(prCommunity * 100) / 100}\n`,
+    `throughput_pr_maintainers_merges_per_day,${Math.round(prMaintainerMerges * 100) / 100}\n`,
+  );
+  process.stdout.write(
+    `throughput_pr_community_per_day,${Math.round(prCommunityAuthored * 100) / 100}\n`,
   );
   process.stdout.write(
     `throughput_issue_overall_per_day,${Math.round(issueOverall * 100) / 100}\n`,
   );
   process.stdout.write(
-    `throughput_issue_maintainers_per_day,${Math.round(issueMaintainers * 100) / 100}\n`,
+    `throughput_issue_maintainers_authored_per_day,${Math.round(issueMaintainerAuthored * 100) / 100}\n`,
   );
   process.stdout.write(
-    `throughput_issue_community_per_day,${Math.round(issueCommunity * 100) / 100}\n`,
+    `throughput_issue_maintainers_closes_per_day,${Math.round(issueMaintainerCloses * 100) / 100}\n`,
+  );
+  process.stdout.write(
+    `throughput_issue_community_per_day,${Math.round(issueCommunityAuthored * 100) / 100}\n`,
   );
   process.stdout.write(
     `throughput_issue_overall_days_per_issue,${issueOverall > 0 ? Math.round((1 / issueOverall) * 100) / 100 : 0}\n`,
   );
```
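`calculateThroughput` itself is defined above this hunk and not shown in the diff. Judging from the `*_per_day` outputs, a plausible shape is items completed per elapsed day over the sampled window; the sketch below is written under that assumption, not taken from the actual implementation. Note also the caveat the code comment admits: filtering out logins containing "bot" means any human-named account that merged or closed an item counts as a maintainer.

```ts
// Plausible sketch of calculateThroughput (an assumption; the real
// definition sits above the hunk): items per day across the span between
// the oldest and newest completion. `items` is already sorted by date.
function calculateThroughput(items: { date: number }[]): number {
  if (items.length < 2) return 0;
  const spanDays = (items[items.length - 1].date - items[0].date) / 86_400_000;
  return spanDays > 0 ? items.length / spanDays : 0;
}
```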