Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .github/workflows/publish-prerelease.yml
Original file line number Diff line number Diff line change
Expand Up @@ -123,6 +123,7 @@ jobs:
MERGE_BASE_COMMIT_HASH: '' # placeholder so that we have autocomplete and logs
CLOUDFRONT_REPO_URL: ${{ vars.AWS_CLOUDFRONT_URL }}/${{ github.event.repository.name }}
HOST_URL: ${{ vars.AWS_CLOUDFRONT_URL }}/${{ github.event.repository.name }}/${{ github.run_id }}
SENTRY_DSN_PERFORMANCE: ${{ vars.SENTRY_DSN_PERFORMANCE }}
BENCHMARK_WORKFLOW_RUN_ID: ${{ github.event.workflow_run.id || github.run_id }}
LAVAMOAT_POLICY_CHANGED: ${{ inputs.lavamoat-policy-changed }}
POST_NEW_BUILDS: ${{ inputs.post-new-builds }}
Expand Down
5 changes: 3 additions & 2 deletions .github/workflows/run-benchmarks.yml
Original file line number Diff line number Diff line change
Expand Up @@ -133,7 +133,7 @@ jobs:
- name: Send benchmark results to Sentry (main/release only)
if: >-
${{
vars.SENTRY_DSN_PERFORMANCE &&
env.SENTRY_DSN_PERFORMANCE &&
(github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/release/'))
}}
run: |
Expand Down Expand Up @@ -183,6 +183,7 @@ jobs:
REPOSITORY: ${{ github.event.repository.name }}
PR_NUMBER: ${{ github.event.pull_request.number || 'none' }}
HEAD_COMMIT_HASH: ${{ github.event.pull_request.head.sha || github.sha }}
# Job env mirrors matrix job; step `if:` uses env.SENTRY_DSN_PERFORMANCE (same as send-to-sentry’s process.env).
SENTRY_DSN_PERFORMANCE: ${{ vars.SENTRY_DSN_PERFORMANCE }}
BENCHMARK_BROWSER_LOADS: '10'
BENCHMARK_PAGE_LOADS: '10'
Expand Down Expand Up @@ -210,7 +211,7 @@ jobs:
- name: Send benchmark results to Sentry (main/release only)
if: >-
${{
vars.SENTRY_DSN_PERFORMANCE &&
env.SENTRY_DSN_PERFORMANCE &&
(github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/release/'))
}}
run: |
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ describe('compare-benchmarks', () => {
it('passes when results are within thresholds', () => {
const benchmarks = [
{
name: 'benchmark-chrome-browserify-userJourneyOnboardingImport',
name: 'benchmark-chrome-webpack-userJourneyOnboardingImport',
data: {
onboardingImportWallet: makeBenchmarkResults({
p75: { importWalletToSocialScreen: 1500 },
Expand All @@ -66,7 +66,7 @@ describe('compare-benchmarks', () => {
it('fails when p75 exceeds fail threshold', () => {
const benchmarks = [
{
name: 'benchmark-chrome-browserify-userJourneyOnboardingImport',
name: 'benchmark-chrome-webpack-userJourneyOnboardingImport',
data: {
onboardingImportWallet: makeBenchmarkResults({
p75: { importWalletToSocialScreen: 99999 },
Expand All @@ -84,7 +84,7 @@ describe('compare-benchmarks', () => {
it('includes relative metrics when baseline is available', () => {
const benchmarks = [
{
name: 'benchmark-chrome-browserify-userJourneyOnboardingImport',
name: 'benchmark-chrome-webpack-userJourneyOnboardingImport',
data: {
onboardingImportWallet: makeBenchmarkResults({
p75: { importWalletToSocialScreen: 1500 },
Expand Down Expand Up @@ -124,7 +124,7 @@ describe('compare-benchmarks', () => {
it('resolves page-load baseline for startup benchmarks', () => {
const benchmarks = [
{
name: 'benchmark-chrome-browserify-startupStandardHome',
name: 'benchmark-chrome-webpack-startupStandardHome',
data: {
startupStandardHome: makeBenchmarkResults({
p75: { uiStartup: 1800 },
Expand Down Expand Up @@ -304,7 +304,7 @@ describe('printReport', () => {
);
});

it('prints pass icon with [Show logs] when all metrics pass', () => {
it('prints pass icon with CI log when all metrics pass', () => {
const { COMPARISON_SEVERITY: SEV } = jest.requireActual(
'./comparison-utils',
) as typeof import('./comparison-utils');
Expand All @@ -329,7 +329,7 @@ describe('printReport', () => {
});

const allCalls = consoleSpy.mock.calls.flat().join('\n');
expect(allCalls).toContain('[Show logs]');
expect(allCalls).toContain('[CI log]');
});

it('prints issue metric lines for comparisons with violations', () => {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -278,7 +278,7 @@ export function printReport(result: {
} else {
const issueLines = lines.filter((line) => line.hasIssue);
if (issueLines.length === 0) {
console.log(`${COMPARISON_SEVERITY.Pass.icon} [Show logs]`);
console.log(`${COMPARISON_SEVERITY.Pass.icon} [CI log]`);
} else {
for (const line of issueLines) {
const details = line.details ? ` | ${line.details}` : '';
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -366,12 +366,10 @@ describe('THRESHOLD_REGISTRY', () => {
expect(THRESHOLD_REGISTRY.loadNewAccount).toBeDefined();
expect(THRESHOLD_REGISTRY.confirmTx).toBeDefined();
expect(THRESHOLD_REGISTRY.bridgeUserActions).toBeDefined();
expect(
THRESHOLD_REGISTRY['chrome-browserify-loadNewAccount'],
).toBeUndefined();
expect(THRESHOLD_REGISTRY['chrome-webpack-loadNewAccount']).toBeUndefined();
});

it('has platform-agnostic keys for user journey (chrome-browserify + chrome-webpack)', () => {
it('has platform-agnostic keys for user journey (no per-platform threshold keys)', () => {
expect(THRESHOLD_REGISTRY.onboardingImportWallet).toBeDefined();
expect(THRESHOLD_REGISTRY.swap).toBeDefined();
expect(THRESHOLD_REGISTRY['chrome-webpack-swap']).toBeUndefined();
Expand All @@ -381,7 +379,7 @@ describe('THRESHOLD_REGISTRY', () => {
expect(THRESHOLD_REGISTRY.startupStandardHome).toBeDefined();
expect(THRESHOLD_REGISTRY.startupPowerUserHome).toBeDefined();
expect(
THRESHOLD_REGISTRY['chrome-browserify-startupStandardHome'],
THRESHOLD_REGISTRY['chrome-webpack-startupStandardHome'],
).toBeUndefined();
expect(
THRESHOLD_REGISTRY['firefox-webpack-startupPowerUserHome'],
Expand Down
190 changes: 179 additions & 11 deletions development/metamaskbot-build-announce/performance-benchmarks.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -487,8 +487,8 @@ describe('buildBenchmarkSection', () => {
expect(html).toContain('<summary><b>Test</b></summary>');
expect(html).toContain('<table style=');
expect(html).toContain('<th>chrome-browserify</th>');
expect(html).toContain('<td>loadNewAccount</td>');
expect(html).toContain('<td>confirmTx</td>');
expect(html).toContain('<td align="left">loadNewAccount</td>');
expect(html).toContain('<td align="left">confirmTx</td>');
});

it('shows 🟢 in the cell when p95 is above baseline but within absolute threshold', () => {
Expand Down Expand Up @@ -662,7 +662,7 @@ describe('buildBenchmarkSection', () => {
expect(html).toContain('–');
});

it('links each cell to the entry artifact URL as [Show logs]', () => {
it('links each cell to the entry artifact URL as CI log', () => {
const ARTIFACT =
'https://cdn.example.com/benchmark-chrome-browserify-foo.json';
const html = buildBenchmarkSection(
Expand All @@ -672,10 +672,10 @@ describe('buildBenchmarkSection', () => {
);

expect(html).toContain(`href="${ARTIFACT}"`);
expect(html).toContain('[Show logs]');
expect(html).toContain('>[CI log]</a>');
});

it('falls back to runUrl for [Show logs] when entry has no artifactUrl', () => {
it('falls back to runUrl for CI log when entry has no artifactUrl', () => {
const RUN_URL = 'https://github.com/actions/runs/123';
const html = buildBenchmarkSection(
withEntries([makeEntry({ p95: { loadNewAccount: 672 } })]),
Expand All @@ -685,7 +685,7 @@ describe('buildBenchmarkSection', () => {
);

expect(html).toContain(`href="${RUN_URL}"`);
expect(html).toContain('[Show logs]');
expect(html).toContain('>[CI log]</a>');
});

it('resolves startup baseline via pageLoad/* key format and shows delta in bullet section', () => {
Expand Down Expand Up @@ -832,7 +832,7 @@ describe('buildBenchmarkSection', () => {
expect(html).toContain('<code>fetchAndDisplaySwapQuotes</code>');
});

it('shows [Show logs] without icon when timers are present', () => {
it('shows CI log without icon when timers are present', () => {
const entry = makeEntry({
benchmarkName: 'swap',
mean: { timer1: 100, timer2: 200 },
Expand All @@ -846,10 +846,10 @@ describe('buildBenchmarkSection', () => {
undefined,
'https://github.com/actions/runs/123',
);
expect(html).not.toMatch(/🟢 <a href=.*\[Show logs\]<br\/>/u);
expect(html).not.toMatch(/🟢 <a href=.*>\[CI log\]<\/a><br\/>/u);
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Test regex uses wrong self-closing tag, passes vacuously

Medium Severity

The test "shows CI log without icon when timers are present" uses the regex `<br\/>` (self-closing) but the production code at line 887 generates `<br>` (no slash). This makes the `.not.toMatch` assertion pass vacuously regardless of actual output. Worse, the new code does include the icon before the CI log link when timers are present (`linkLine = rowLinks ? `${icon} ${rowLinks}` : icon`), directly contradicting the test name. The test is silently not validating anything.

Additional Locations (1)
Fix in Cursor Fix in Web

Reviewed by Cursor Bugbot for commit d3bcbde. Configure here.

});

it('shows icon with [Show logs] when no timers present', () => {
it('shows icon with CI log when no timers present', () => {
const entry = makeEntry({
benchmarkName: 'loadNewAccount',
mean: {},
Expand All @@ -860,7 +860,7 @@ describe('buildBenchmarkSection', () => {

const html = buildBenchmarkSection(withEntries([entry]), 'Test');

expect(html).toContain('[Show logs]');
expect(html).toContain('>[CI log]</a>');
expect(html).toContain(COMPARISON_SEVERITY.Pass.icon);
expect(html).not.toContain('<ul');
});
Expand Down Expand Up @@ -905,11 +905,149 @@ describe('buildBenchmarkSection', () => {
);

expect(html).toContain(COMPARISON_SEVERITY.Pass.icon);
expect(html).toContain('[Show logs]');
expect(html).toContain('>[CI log]</a>');
expect(html).not.toContain('<code>uiStartup</code>');
expect(html).not.toContain('<ul');
});
});

// Tests for artifact and Sentry link rendering in the benchmark PR comment.
// Each test mutates process.env (BRANCH, GITHUB_HEAD_REF, GITHUB_REF_NAME,
// SENTRY_DSN_PERFORMANCE); beforeEach snapshots the originals and afterEach
// restores them so state cannot leak into other tests.
describe('artifact and Sentry links', () => {
// Dummy DSN — only its presence/absence matters; nothing is sent to Sentry.
const mockDsn = 'https://fake@metamask.sentry.io/4510302346608640';
// Env snapshots captured in beforeEach, restored in afterEach.
let savedBranch: string | undefined;
let savedHeadRef: string | undefined;
let savedRefName: string | undefined;
let savedSentryDsn: string | undefined;

beforeEach(() => {
savedBranch = process.env.BRANCH;
savedHeadRef = process.env.GITHUB_HEAD_REF;
savedRefName = process.env.GITHUB_REF_NAME;
savedSentryDsn = process.env.SENTRY_DSN_PERFORMANCE;
});

afterEach(() => {
// Restore exactly: delete when the var was originally unset, otherwise
// assign the saved value back. (Assigning `undefined` directly would
// store the string "undefined" on process.env.)
if (savedBranch === undefined) {
delete process.env.BRANCH;
} else {
process.env.BRANCH = savedBranch;
}
if (savedHeadRef === undefined) {
delete process.env.GITHUB_HEAD_REF;
} else {
process.env.GITHUB_HEAD_REF = savedHeadRef;
}
if (savedRefName === undefined) {
delete process.env.GITHUB_REF_NAME;
} else {
process.env.GITHUB_REF_NAME = savedRefName;
}
if (savedSentryDsn === undefined) {
delete process.env.SENTRY_DSN_PERFORMANCE;
} else {
process.env.SENTRY_DSN_PERFORMANCE = savedSentryDsn;
}
});

it('includes Sentry Logs Explorer link when BRANCH and SENTRY_DSN_PERFORMANCE are set', () => {
process.env.BRANCH = 'feat/test';
process.env.SENTRY_DSN_PERFORMANCE = mockDsn;
const html = buildBenchmarkSection(
withEntries([
makeEntry({ artifactUrl: 'https://cdn.example.com/benchmark.json' }),
]),
'Test',
BASELINE_PASS,
);
// Both link kinds are expected: the CI log link and the Sentry Explorer link.
expect(html).toContain('>[CI log]</a>');
expect(html).toContain('>[Sentry log · main/release]</a>');
expect(html).toContain('metamask.sentry.io/explore/logs');
});

it('uses GITHUB_REF_NAME for Sentry when BRANCH is unset', () => {
// Only GITHUB_REF_NAME is set; the branch fallback chain must reach it.
delete process.env.BRANCH;
delete process.env.GITHUB_HEAD_REF;
process.env.GITHUB_REF_NAME = 'release/1.0.0';
process.env.SENTRY_DSN_PERFORMANCE = mockDsn;
const html = buildBenchmarkSection(
withEntries([
makeEntry({ artifactUrl: 'https://cdn.example.com/benchmark.json' }),
]),
'Test',
BASELINE_PASS,
);
expect(html).toContain('>[Sentry log · main/release]</a>');
});

it('omits Sentry Logs Explorer link when SENTRY_DSN_PERFORMANCE is unset', () => {
process.env.BRANCH = 'feat/test';
delete process.env.SENTRY_DSN_PERFORMANCE;
const html = buildBenchmarkSection(
withEntries([
makeEntry({ artifactUrl: 'https://cdn.example.com/benchmark.json' }),
]),
'Test',
BASELINE_PASS,
);
// CI log link is unconditional; only the Sentry link is gated on the DSN.
expect(html).toContain('>[CI log]</a>');
expect(html).not.toContain('metamask.sentry.io/explore/logs');
});

it('still includes Sentry row link when branch env vars are unset (uses ci.branch:main in URL)', () => {
// Force-empty so CI-injected GITHUB_HEAD_REF cannot satisfy `||` chain (delete is not always enough).
process.env.BRANCH = '';
process.env.GITHUB_HEAD_REF = '';
process.env.GITHUB_REF_NAME = '';
process.env.SENTRY_DSN_PERFORMANCE = mockDsn;
const html = buildBenchmarkSection(
withEntries([
makeEntry({ artifactUrl: 'https://cdn.example.com/benchmark.json' }),
]),
'Test',
BASELINE_PASS,
);
expect(html).toContain('>[CI log]</a>');
expect(html).toContain('metamask.sentry.io/explore/logs');
// URL-encoded query params: `ci.branch:main` fallback and the benchmark
// message filter (`message:benchmark.loadNewAccount`).
expect(html).toContain('ci.branch%3Amain');
expect(html).toContain('message%3Abenchmark.loadNewAccount');
});

it('puts row Sentry under the benchmark name and CI log only in timer detail lines', () => {
process.env.BRANCH = 'main';
process.env.SENTRY_DSN_PERFORMANCE = mockDsn;
// Entry with per-timer metrics so the timer detail section is rendered.
const entry = makeEntry({
benchmarkName: 'swap',
mean: {
openSwapPageFromHome: 310,
fetchAndDisplaySwapQuotes: 5000,
},
p75: {
openSwapPageFromHome: 340,
fetchAndDisplaySwapQuotes: 4500,
},
p95: {
openSwapPageFromHome: 400,
fetchAndDisplaySwapQuotes: 5500,
},
artifactUrl: 'https://cdn.example.com/swap.json',
});
const html = buildBenchmarkSection(
withEntries([entry]),
'Test',
undefined,
'https://github.com/actions/runs/999',
);
expect(html).toContain('<code>fetchAndDisplaySwapQuotes</code>');
expect(html).toContain('>[CI log]</a>');
// Sentry link is attached directly under the benchmark name cell…
expect(html).toContain(
'swap<br><a href="https://metamask.sentry.io/explore/logs/',
);
expect(html).toContain('message%3Abenchmark.swap');
expect(html).toContain('>[Sentry log · main/release]</a>');
// …and must NOT be repeated after each timer detail line.
expect(html).not.toMatch(
/<code>fetchAndDisplaySwapQuotes<\/code><br>\[Sentry log/u,
);
});
});
});

describe('buildPerformanceBenchmarksSection', () => {
Expand Down Expand Up @@ -1126,6 +1264,36 @@ describe('buildPerformanceBenchmarksSection', () => {
expect(html).toContain('startupStandardHome');
});

// Verifies the full section builder threads Sentry links through both the
// matrix table and the regression list. Env is saved/restored in a
// try/finally (this test is outside the describe that does it in hooks).
it('includes Sentry Logs Explorer links in matrix and regression list when BRANCH and SENTRY_DSN_PERFORMANCE are set', async () => {
// Dummy DSN — only its presence matters; nothing is sent to Sentry.
const mockDsn = 'https://fake@metamask.sentry.io/4510302346608640';
const savedBranch = process.env.BRANCH;
const savedSentry = process.env.SENTRY_DSN_PERFORMANCE;
try {
process.env.BRANCH = 'main';
process.env.SENTRY_DSN_PERFORMANCE = mockDsn;
// FAILING_PAYLOAD forces regressions so the regression list is rendered.
mockFetch.mockResolvedValue({
ok: true,
json: () => Promise.resolve(FAILING_PAYLOAD),
} as unknown as Response);

const html = await buildPerformanceBenchmarksSection(HOST);

expect(html).toContain('>[Sentry log · main/release]</a>');
expect(html).toContain('metamask.sentry.io/explore/logs');
} finally {
// Restore exactly: delete when originally unset, otherwise assign back
// (assigning `undefined` would store the string "undefined").
if (savedBranch === undefined) {
delete process.env.BRANCH;
} else {
process.env.BRANCH = savedBranch;
}
if (savedSentry === undefined) {
delete process.env.SENTRY_DSN_PERFORMANCE;
} else {
process.env.SENTRY_DSN_PERFORMANCE = savedSentry;
}
}
});

it('exercises getEntryRegressions with baseline data when entry is non-pass', async () => {
mockFetch.mockResolvedValue({
ok: true,
Expand Down
Loading
Loading