ci: Add automated QA metrics reporting to PRs (#28003)

This commit is contained in:
Declan Carroll 2026-04-07 14:17:01 +01:00 committed by GitHub
parent 14e0c10f4d
commit 7ed34d7f85
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
6 changed files with 229 additions and 12 deletions

View File

@ -0,0 +1,136 @@
#!/usr/bin/env node
/**
* Fetches QA metric comparisons and posts/updates a PR comment.
*
* Usage:
* node .github/scripts/post-qa-metrics-comment.mjs --metrics memory-heap-used-baseline
* node .github/scripts/post-qa-metrics-comment.mjs --metrics memory-heap-used-baseline --pr 27880 --dry-run
*
* Env:
* QA_METRICS_COMMENT_WEBHOOK_URL - n8n workflow webhook (required)
* QA_METRICS_WEBHOOK_USER/PASSWORD - Basic auth for webhook
* GITHUB_TOKEN - For posting comments (not needed with --dry-run)
* GITHUB_REF, GITHUB_REPOSITORY, GITHUB_SHA - Auto-set in CI
*/
import { parseArgs } from 'node:util';
// Hidden HTML marker embedded in our comment so later runs can find and update it.
const MARKER = '<!-- n8n-qa-metrics-comparison -->';

/** Log an error message and abort the run with a non-zero exit code. */
const bail = (message) => {
  console.error(message);
  process.exit(1);
};

const { values } = parseArgs({
  options: {
    metrics: { type: 'string' },
    pr: { type: 'string' },
    'baseline-days': { type: 'string', default: '14' },
    'dry-run': { type: 'boolean', default: false },
  },
  strict: true,
});

// --metrics is a single comma-separated flag; split and trim each name.
const metrics = values.metrics?.split(',').map((name) => name.trim());
if (!metrics?.length) {
  bail('--metrics is required (comma-separated metric names)');
}

// Fall back to the PR number encoded in GITHUB_REF when --pr is not given.
const pr = Number.parseInt(values.pr ?? inferPr(), 10);
if (!pr) {
  bail('--pr is required (or set GITHUB_REF)');
}

const webhookUrl = process.env.QA_METRICS_COMMENT_WEBHOOK_URL;
if (!webhookUrl) {
  bail('QA_METRICS_COMMENT_WEBHOOK_URL is required');
}

const repo = process.env.GITHUB_REPOSITORY ?? 'n8n-io/n8n';
const sha = process.env.GITHUB_SHA?.slice(0, 8) ?? '';
const baselineDays = Number.parseInt(values['baseline-days'], 10);

// --- Fetch ---
const headers = { 'Content-Type': 'application/json' };
// Optional Basic auth for the n8n webhook.
const { QA_METRICS_WEBHOOK_USER: user, QA_METRICS_WEBHOOK_PASSWORD: pass } = process.env;
if (user && pass) {
  const credentials = Buffer.from(`${user}:${pass}`).toString('base64');
  headers.Authorization = `Basic ${credentials}`;
}
console.log(`PR #${pr}: fetching ${metrics.join(', ')} (${baselineDays}-day baseline)`);

// Single round-trip to the n8n workflow; give up after one minute.
const payload = {
  pr_number: pr,
  github_repo: repo,
  git_sha: sha,
  baseline_days: baselineDays,
  metric_names: metrics,
};
const response = await fetch(webhookUrl, {
  method: 'POST',
  headers,
  body: JSON.stringify(payload),
  signal: AbortSignal.timeout(60_000),
});
if (!response.ok) {
  // Best-effort body read for diagnostics; ignore read failures.
  const text = await response.text().catch(() => '');
  console.error(`Webhook failed: ${response.status} ${response.statusText}\n${text}`);
  process.exit(1);
}

// The webhook returns pre-rendered markdown plus a flag telling us whether
// any metric data existed for this PR.
const { markdown, has_data } = await response.json();
if (!markdown || !has_data) {
  console.log('No metric data available, skipping.');
  process.exit(0);
}
if (values['dry-run']) {
  console.log('\n--- DRY RUN ---\n');
  console.log(markdown);
  process.exit(0);
}
// --- Post comment ---
const token = process.env.GITHUB_TOKEN;
if (!token) {
  console.error('GITHUB_TOKEN is required to post comments');
  process.exit(1);
}
const [owner, repoName] = repo.split('/');
const ghHeaders = {
  Accept: 'application/vnd.github+json',
  Authorization: `Bearer ${token}`,
  'Content-Type': 'application/json',
};

/**
 * Call the GitHub REST API and return the parsed JSON body.
 * Fails the job on a non-2xx response instead of silently continuing
 * (the original code never checked `res.ok`, so a failed PATCH/POST
 * would log "Created comment undefined" and still exit 0).
 */
async function githubApi(url, init = {}) {
  const res = await fetch(url, { headers: ghHeaders, ...init });
  if (!res.ok) {
    const text = await res.text().catch(() => '');
    console.error(`GitHub API ${init.method ?? 'GET'} ${url} failed: ${res.status} ${res.statusText}\n${text}`);
    process.exit(1);
  }
  return res.json();
}

// Look for an earlier comment carrying our marker so we update in place.
// NOTE: only the first 100 comments are scanned; on extremely long PR
// threads a marker past page 1 would be missed and a duplicate created.
const comments = await githubApi(
  `https://api.github.com/repos/${owner}/${repoName}/issues/${pr}/comments?per_page=100`,
);
const existing = Array.isArray(comments)
  ? comments.find((c) => c.body?.includes(MARKER))
  : null;
if (existing) {
  await githubApi(
    `https://api.github.com/repos/${owner}/${repoName}/issues/comments/${existing.id}`,
    { method: 'PATCH', body: JSON.stringify({ body: markdown }) },
  );
  console.log(`Updated comment ${existing.id}`);
} else {
  const created = await githubApi(
    `https://api.github.com/repos/${owner}/${repoName}/issues/${pr}/comments`,
    { method: 'POST', body: JSON.stringify({ body: markdown }) },
  );
  console.log(`Created comment ${created.id}`);
}
/**
 * Derive the PR number from GITHUB_REF (e.g. "refs/pull/123/merge").
 * @returns {string|undefined} The PR number as a string, or undefined
 *   when the ref is absent or not a pull-request ref.
 */
function inferPr() {
  const ref = process.env.GITHUB_REF ?? '';
  const [, prNumber] = ref.match(/refs\/pull\/(\d+)/) ?? [];
  return prNumber;
}

View File

@ -18,6 +18,18 @@ import { existsSync, readFileSync } from 'node:fs';
import { sendMetrics, metric } from './send-metrics.mjs';
/** Parse human-readable sizes (e.g. "1.5G", "500M", "12K") to MB. */
/** Parse human-readable sizes (e.g. "1.5G", "500M", "12K") to MB. */
function parseSizeToMB(val) {
  // Numeric inputs are treated as raw bytes.
  if (typeof val === 'number') return val / (1024 * 1024);
  if (typeof val !== 'string') return null;
  // Accept forms like "1.5G", "500 MB", "2GiB"; case-insensitive.
  const parsed = /^([\d.]+)\s*([KMGT]?)i?B?$/i.exec(val);
  if (parsed === null) return null;
  const [, digits, unit] = parsed;
  const value = Number.parseFloat(digits);
  // Multiplier converting the given unit to megabytes; a bare number
  // (empty unit) is interpreted as bytes.
  let factor;
  switch (unit.toUpperCase()) {
    case 'K':
      factor = 1 / 1024;
      break;
    case 'M':
      factor = 1;
      break;
    case 'G':
      factor = 1024;
      break;
    case 'T':
      factor = 1024 * 1024;
      break;
    default:
      factor = 1 / (1024 * 1024);
      break;
  }
  // Round to two decimal places.
  return Math.round(value * factor * 100) / 100;
}
const buildManifestPath = 'compiled/build-manifest.json';
const dockerManifestPath = 'docker-build-manifest.json';
@ -37,11 +49,13 @@ const dockerManifest = existsSync(dockerManifestPath)
const metrics = [];
if (buildManifest) {
if (buildManifest.artifactSize != null) {
metrics.push(metric('artifact-size', buildManifest.artifactSize, 'bytes', { artifact: 'compiled' }));
const sizeMB = parseSizeToMB(buildManifest.artifactSize);
if (sizeMB != null) {
metrics.push(metric('artifact-size', sizeMB, 'MB', { artifact: 'compiled' }));
}
if (buildManifest.buildDuration != null) {
metrics.push(metric('build-duration', buildManifest.buildDuration / 1000, 's', { artifact: 'compiled' }));
const duration = buildManifest.buildDuration;
if (duration?.total != null) {
metrics.push(metric('build-duration', duration.total / 1000, 's', { artifact: 'compiled' }));
}
}
@ -49,12 +63,12 @@ if (dockerManifest) {
const platform = dockerManifest.platform ?? 'unknown';
for (const image of dockerManifest.images ?? []) {
if (image.sizeBytes != null) {
const imageSizeMB = parseSizeToMB(image.size ?? image.sizeBytes);
const imageName = image.imageName ?? image.name ?? 'unknown';
const shortName = imageName.replace(/^n8nio\//, '').replace(/:.*$/, '');
if (imageSizeMB != null) {
metrics.push(
metric('docker-image-size', image.sizeBytes, 'bytes', {
image: image.name ?? 'unknown',
platform,
}),
metric(`docker-image-size-${shortName}`, imageSizeMB, 'MB', { platform }),
);
}
}

View File

@ -27,6 +27,7 @@ jobs:
db: ${{ fromJSON(steps.ci-filter.outputs.results).db == true }}
design_system: ${{ fromJSON(steps.ci-filter.outputs.results)['design-system'] == true }}
performance: ${{ fromJSON(steps.ci-filter.outputs.results).performance == true }}
e2e_performance: ${{ fromJSON(steps.ci-filter.outputs.results)['e2e-performance'] == true }}
commit_sha: ${{ steps.commit-sha.outputs.sha }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
@ -69,6 +70,11 @@ jobs:
packages/testing/performance/**
packages/workflow/src/**
.github/workflows/test-bench-reusable.yml
e2e-performance:
packages/testing/playwright/tests/performance/**
packages/testing/playwright/utils/performance-helper.ts
packages/testing/containers/**
.github/workflows/test-e2e-performance-reusable.yml
db:
packages/cli/src/databases/**
packages/cli/src/modules/*/database/**
@ -167,6 +173,16 @@ jobs:
with:
ref: ${{ needs.install-and-build.outputs.commit_sha }}
e2e-performance:
name: E2E Performance
needs: install-and-build
if: >-
(needs.install-and-build.outputs.ci == 'true' || needs.install-and-build.outputs.e2e_performance == 'true') &&
github.event_name == 'pull_request' &&
github.repository == 'n8n-io/n8n'
uses: ./.github/workflows/test-e2e-performance-reusable.yml
secrets: inherit
security-checks:
name: Security Checks
needs: install-and-build
@ -224,3 +240,14 @@ jobs:
with:
mode: validate
job-results: ${{ toJSON(needs) }}
# Posts a QA metrics comparison comment on the PR.
# Runs after all checks so any job can emit metrics before this reports.
post-qa-metrics-comment:
name: QA Metrics
needs: [required-checks, e2e-performance]
if: always()
uses: ./.github/workflows/util-qa-metrics-comment-reusable.yml
with:
metrics: memory-heap-used-baseline,docker-image-size-n8n,docker-image-size-runners
secrets: inherit

View File

@ -5,9 +5,6 @@ on:
workflow_dispatch:
schedule:
- cron: '0 0 * * *' # Runs daily at midnight
pull_request:
paths:
- '.github/workflows/test-e2e-performance-reusable.yml'
jobs:
build-and-test-performance:

View File

@ -130,6 +130,9 @@ jobs:
enable-docker-cache: ${{ inputs.test-mode == 'docker-build' }}
env:
INCLUDE_TEST_CONTROLLER: ${{ inputs.test-mode == 'docker-build' && 'true' || '' }}
QA_METRICS_WEBHOOK_URL: ${{ secrets.QA_METRICS_WEBHOOK_URL }}
QA_METRICS_WEBHOOK_USER: ${{ secrets.QA_METRICS_WEBHOOK_USER }}
QA_METRICS_WEBHOOK_PASSWORD: ${{ secrets.QA_METRICS_WEBHOOK_PASSWORD }}
- name: Install Browsers
run: pnpm turbo run install-browsers --filter=n8n-playwright

View File

@ -0,0 +1,40 @@
# Reusable workflow: fetches QA metric comparisons through an n8n webhook
# and posts/updates a single "sticky" comment on the PR via
# .github/scripts/post-qa-metrics-comment.mjs.
name: 'QA: Metrics PR Comment'

on:
  workflow_call:
    inputs:
      metrics:
        description: 'Comma-separated list of metric names to report'
        required: true
        type: string
      baseline-days:
        description: 'Number of days for the rolling baseline'
        required: false
        type: number
        default: 14

jobs:
  post-comment:
    name: Post Metrics Comment
    # Only run for pull requests from the main repository — forked PRs do
    # not have access to the webhook secrets used below.
    if: >-
      github.event_name == 'pull_request' &&
      !github.event.pull_request.head.repo.fork
    runs-on: ubuntu-slim
    # Metrics reporting is best-effort: never fail the calling pipeline.
    continue-on-error: true
    permissions:
      pull-requests: write
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          # Only the comment script is needed; skip the rest of the repo.
          sparse-checkout: .github/scripts/post-qa-metrics-comment.mjs
          sparse-checkout-cone-mode: false
      - name: Post QA metrics comparison
        env:
          QA_METRICS_COMMENT_WEBHOOK_URL: ${{ secrets.QA_METRICS_COMMENT_WEBHOOK_URL }}
          QA_METRICS_WEBHOOK_USER: ${{ secrets.QA_METRICS_WEBHOOK_USER }}
          QA_METRICS_WEBHOOK_PASSWORD: ${{ secrets.QA_METRICS_WEBHOOK_PASSWORD }}
          GITHUB_TOKEN: ${{ github.token }}
        # --pr is omitted: the script infers it from GITHUB_REF on PR events.
        run: |
          node .github/scripts/post-qa-metrics-comment.mjs \
            --metrics "${{ inputs.metrics }}" \
            --baseline-days "${{ inputs.baseline-days }}"