diff --git a/dist/index.js b/dist/index.js
index 6c37a23..ddb77ac 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -95707,10 +95707,41 @@ var __webpack_exports__ = {};
});
return runsResponse.data;
}
+ async hasArtifactsForCommit(commitHash) {
+ try {
+ const workflowRuns = await this.findAllWorkflowRunsByCommit(commitHash);
+ for (const workflowRun of workflowRuns)try {
+ const runArtifacts = await this.listArtifactsForWorkflowRun(workflowRun.id);
+ if (runArtifacts.artifacts && runArtifacts.artifacts.length > 0) return true;
+ } catch (error) {
+ continue;
+ }
+ return false;
+ } catch (error) {
+ return false;
+ }
+ }
+ async getParentCommit(commitHash) {
+ const { owner, repo } = this.repository;
+ try {
+ const commitResponse = await this.octokit.rest.repos.getCommit({
+ owner,
+ repo,
+ ref: commitHash
+ });
+ if (commitResponse.data.parents && commitResponse.data.parents.length > 0) return commitResponse.data.parents[0].sha.substring(0, 10);
+ return null;
+ } catch (error) {
+ const apiError = error;
+ console.warn(`ā ļø Failed to get parent commit for ${commitHash}: ${apiError.message}`);
+ return null;
+ }
+ }
async getTargetBranchLatestCommit() {
const targetBranch = this.getTargetBranch();
console.log(`š Attempting to get latest commit for target branch: ${targetBranch}`);
console.log(`š Repository: ${this.repository.owner}/${this.repository.repo}`);
+ let latestCommitHash = null;
try {
console.log(`š” Trying to get latest commit from GitHub API...`);
const { owner, repo } = this.repository;
@@ -95721,9 +95752,8 @@ var __webpack_exports__ = {};
branch: targetBranch
});
if (branchResponse.data && branchResponse.data.commit) {
- const commitHash = branchResponse.data.commit.sha.substring(0, 10);
- console.log(`✅ Found commit hash from GitHub API: ${commitHash}`);
- return commitHash;
+ latestCommitHash = branchResponse.data.commit.sha.substring(0, 10);
+ console.log(`✅ Found commit hash from GitHub API: ${latestCommitHash}`);
}
} catch (error) {
const apiError = error;
@@ -95741,67 +95771,121 @@ var __webpack_exports__ = {};
branch: altBranch
});
if (altResponse.data && altResponse.data.commit) {
- const commitHash = altResponse.data.commit.sha.substring(0, 10);
- console.log(`✅ Found commit hash from alternative branch ${altBranch}: ${commitHash}`);
- return commitHash;
+ latestCommitHash = altResponse.data.commit.sha.substring(0, 10);
+ console.log(`✅ Found commit hash from alternative branch ${altBranch}: ${latestCommitHash}`);
+ break;
}
} catch (error) {
const altError = error;
console.log(`ā Alternative branch ${altBranch} also failed: ${altError.message}`);
}
}
- console.log(`š Trying to get from workflow runs...`);
- try {
- const runs = await this.listWorkflowRuns({
- branch: targetBranch,
- status: 'completed',
- limit: 10
- });
- if (runs.workflow_runs && runs.workflow_runs.length > 0) {
- console.log(`Found ${runs.workflow_runs.length} workflow runs for ${targetBranch}`);
- const successfulRun = runs.workflow_runs.find((run)=>'success' === run.conclusion);
- if (successfulRun) {
- console.log(`✅ Found successful workflow run for ${targetBranch}: ${successfulRun.head_sha}`);
- return successfulRun.head_sha.substring(0, 10);
+ if (!latestCommitHash) {
+ console.log(`š Trying to get from workflow runs...`);
+ try {
+ const runs = await this.listWorkflowRuns({
+ branch: targetBranch,
+ status: 'completed',
+ limit: 10
+ });
+ if (runs.workflow_runs && runs.workflow_runs.length > 0) {
+ console.log(`Found ${runs.workflow_runs.length} workflow runs for ${targetBranch}`);
+ const successfulRun = runs.workflow_runs.find((run)=>'success' === run.conclusion);
+ if (successfulRun) {
+ latestCommitHash = successfulRun.head_sha.substring(0, 10);
+ console.log(`✅ Found successful workflow run for ${targetBranch}: ${latestCommitHash}`);
+ } else {
+ const latestRun = runs.workflow_runs[0];
+ latestCommitHash = latestRun.head_sha.substring(0, 10);
+ console.log(`ā ļø No successful runs found, using latest workflow run for ${targetBranch}: ${latestCommitHash}`);
+ }
}
- const latestRun = runs.workflow_runs[0];
- console.log(`ā ļø No successful runs found, using latest workflow run for ${targetBranch}: ${latestRun.head_sha}`);
- return latestRun.head_sha.substring(0, 10);
+ } catch (error) {
+ const workflowError = error;
+ console.warn(`ā ļø Failed to get workflow runs: ${workflowError.message}`);
}
- } catch (error) {
- const workflowError = error;
- console.warn(`ā ļø Failed to get workflow runs: ${workflowError.message}`);
}
- console.log(`š§ No workflow runs found for ${targetBranch}, trying to fetch from remote...`);
- try {
- console.log(`š„ Running: git fetch origin`);
- (0, external_child_process_.execSync)('git fetch origin', {
- encoding: 'utf8'
- });
- console.log(`š„ Running: git rev-parse --short=10 origin/${targetBranch}`);
- const commitHash = (0, external_child_process_.execSync)(`git rev-parse --short=10 origin/${targetBranch}`, {
- encoding: 'utf8'
- }).trim();
- console.log(`✅ Found commit hash from git: ${commitHash}`);
- return commitHash;
- } catch (gitError) {
- console.warn(`ā Git fetch failed: ${gitError}`);
+ if (!latestCommitHash) {
+ console.log(`š§ No workflow runs found for ${targetBranch}, trying to fetch from remote...`);
try {
- console.log(`š„ Trying alternative: git ls-remote origin ${targetBranch}`);
- const remoteRef = (0, external_child_process_.execSync)(`git ls-remote origin ${targetBranch}`, {
+ console.log(`š„ Running: git fetch origin`);
+ (0, external_child_process_.execSync)('git fetch origin', {
+ encoding: 'utf8'
+ });
+ console.log(`š„ Running: git rev-parse --short=10 origin/${targetBranch}`);
+ latestCommitHash = (0, external_child_process_.execSync)(`git rev-parse --short=10 origin/${targetBranch}`, {
encoding: 'utf8'
}).trim();
- if (remoteRef) {
- const commitHash = remoteRef.split('\t')[0].substring(0, 10);
- console.log(`ā
Found commit hash from git ls-remote: ${commitHash}`);
- return commitHash;
+ console.log(`✅ Found commit hash from git: ${latestCommitHash}`);
+ } catch (gitError) {
+ console.warn(`ā Git fetch failed: ${gitError}`);
+ try {
+ console.log(`š„ Trying alternative: git ls-remote origin ${targetBranch}`);
+ const remoteRef = (0, external_child_process_.execSync)(`git ls-remote origin ${targetBranch}`, {
+ encoding: 'utf8'
+ }).trim();
+ if (remoteRef) {
+ latestCommitHash = remoteRef.split('\t')[0].substring(0, 10);
+ console.log(`✅ Found commit hash from git ls-remote: ${latestCommitHash}`);
+ }
+ } catch (altError) {
+ console.warn(`ā Alternative git command failed: ${altError}`);
}
- } catch (altError) {
- console.warn(`ā Alternative git command failed: ${altError}`);
}
}
- console.error(`ā All methods to get target branch commit have failed`);
- throw new Error(`Unable to get target branch (${targetBranch}) commit hash. Please ensure the branch exists and you have correct permissions.`);
+ if (!latestCommitHash) {
+ console.error(`ā All methods to get target branch commit have failed`);
+ throw new Error(`Unable to get target branch (${targetBranch}) commit hash. Please ensure the branch exists and you have correct permissions.`);
+ }
+ console.log(`š Checking if commit ${latestCommitHash} has baseline artifacts...`);
+ const hasArtifacts = await this.hasArtifactsForCommit(latestCommitHash);
+ if (hasArtifacts) {
+ console.log(`✅ Commit ${latestCommitHash} has baseline artifacts`);
+ return {
+ commitHash: latestCommitHash,
+ usedFallbackCommit: false
+ };
+ }
+ console.log(`ā ļø Commit ${latestCommitHash} does not have baseline artifacts`);
+ console.log(`š Looking for previous commits with baseline artifacts...`);
+ let currentCommit = latestCommitHash;
+ let checkedCommits = [
+ currentCommit
+ ];
+ const maxDepth = 5;
+ for(let depth = 0; depth < maxDepth; depth++){
+ const parentCommit = await this.getParentCommit(currentCommit);
+ if (!parentCommit) {
+ console.log(`ā ļø Reached the beginning of the branch, no more parent commits`);
+ break;
+ }
+ if (checkedCommits.includes(parentCommit)) {
+ console.log(`ā ļø Detected circular reference, stopping search`);
+ break;
+ }
+ checkedCommits.push(parentCommit);
+ console.log(`š Checking parent commit ${parentCommit}...`);
+ const parentHasArtifacts = await this.hasArtifactsForCommit(parentCommit);
+ if (parentHasArtifacts) {
+ console.log(`✅ Found commit ${parentCommit} with baseline artifacts`);
+ console.log(`\nā ļø Note: The latest commit (${latestCommitHash}) does not have baseline artifacts.`);
+ console.log(` Using commit ${parentCommit} for baseline comparison instead.`);
+ console.log(" If this seems incorrect, please wait a few minutes and try rerunning the workflow.");
+ return {
+ commitHash: parentCommit,
+ usedFallbackCommit: true,
+ latestCommitHash: latestCommitHash
+ };
+ }
+ currentCommit = parentCommit;
+ }
+ console.log(`\nā ļø No commits with baseline artifacts found in the last ${maxDepth} commits.`);
+ console.log(` Using latest commit ${latestCommitHash} anyway.`);
+ console.log(" Note: If baseline comparison fails, please wait a few minutes and try rerunning the workflow.");
+ return {
+ commitHash: latestCommitHash,
+ usedFallbackCommit: false
+ };
} catch (error) {
console.error(`ā Failed to get target branch commit: ${error}`);
console.error(`Repository: ${this.repository.owner}/${this.repository.repo}`);
@@ -96533,7 +96617,7 @@ var __webpack_exports__ = {};
}
return pathParts[0] || 'root';
}
- async function processSingleFile(fullPath, currentCommitHash, targetCommitHash) {
+ async function processSingleFile(fullPath, currentCommitHash, targetCommitHash, baselineUsedFallback, baselineLatestCommitHash) {
const fileName = external_path_default().basename(fullPath);
const relativePath = external_path_default().relative(process.cwd(), fullPath);
const pathParts = relativePath.split(external_path_default().sep);
@@ -96565,6 +96649,8 @@ var __webpack_exports__ = {};
if (baselineBundleAnalysis) {
report.baseline = baselineBundleAnalysis;
report.baselineCommitHash = targetCommitHash;
+ report.baselineUsedFallback = baselineUsedFallback;
+ report.baselineLatestCommitHash = baselineLatestCommitHash;
try {
const githubService = new GitHubService();
baselinePRs = await githubService.findPRsByCommit(targetCommitHash);
@@ -96655,10 +96741,16 @@ var __webpack_exports__ = {};
const currentCommitHash = githubService.getCurrentCommitHash();
console.log(`Current commit hash: ${currentCommitHash}`);
let targetCommitHash = null;
+ let baselineUsedFallback = false;
+ let baselineLatestCommitHash;
if (isPullRequestEvent()) try {
console.log('š Getting target branch commit hash...');
- targetCommitHash = await githubService.getTargetBranchLatestCommit();
+ const commitInfo = await githubService.getTargetBranchLatestCommit();
+ targetCommitHash = commitInfo.commitHash;
+ baselineUsedFallback = commitInfo.usedFallbackCommit;
+ baselineLatestCommitHash = commitInfo.latestCommitHash;
console.log(`✅ Target branch commit hash: ${targetCommitHash}`);
+ if (baselineUsedFallback && baselineLatestCommitHash) console.log(`ā ļø Using fallback commit: ${targetCommitHash} (latest: ${baselineLatestCommitHash})`);
} catch (error) {
console.error(`ā Failed to get target branch commit: ${error}`);
console.log('š No baseline data available for comparison');
@@ -96702,14 +96794,19 @@ var __webpack_exports__ = {};
} else if (isPR) {
console.log('š„ Detected pull request event - processing files');
for (const fullPath of matchedFiles){
- const report = await processSingleFile(fullPath, currentCommitHash, targetCommitHash);
+ const report = await processSingleFile(fullPath, currentCommitHash, targetCommitHash, baselineUsedFallback, baselineLatestCommitHash);
projectReports.push(report);
}
if (projectReports.length > 0) if (1 === projectReports.length) {
const report = projectReports[0];
- if (report.current) await generateBundleAnalysisReport(report.current, report.baseline || void 0, true, report.baselineCommitHash, report.baselinePRs);
+ if (report.current) {
+ if (report.baselineUsedFallback && report.baselineLatestCommitHash) await core.summary.addRaw(`> ā ļø **Note:** The latest commit (\`${report.baselineLatestCommitHash}\`) does not have baseline artifacts. Using commit \`${report.baselineCommitHash}\` for baseline comparison instead. If this seems incorrect, please wait a few minutes and try rerunning the workflow.\n\n`);
+ await generateBundleAnalysisReport(report.current, report.baseline || void 0, true, report.baselineCommitHash, report.baselinePRs);
+ }
} else {
await core.summary.addHeading('š¦ Monorepo Bundle Analysis', 2);
+ const firstReport = projectReports.find((r)=>r.current);
+ if (firstReport?.baselineUsedFallback && firstReport?.baselineLatestCommitHash) await core.summary.addRaw(`> ā ļø **Note:** The latest commit (\`${firstReport.baselineLatestCommitHash}\`) does not have baseline artifacts. Using commit \`${firstReport.baselineCommitHash}\` for baseline comparison instead. If this seems incorrect, please wait a few minutes and try rerunning the workflow.\n\n`);
for (const report of projectReports)if (report.current) {
await core.summary.addHeading(`š ${report.projectName}`, 3);
await core.summary.addRaw(`**Path:** \`${report.filePath}\``);
@@ -96721,7 +96818,28 @@ var __webpack_exports__ = {};
if (isPR && projectReports.length > 0) {
const { context } = __webpack_require__("./node_modules/.pnpm/@actions+github@4.0.0/node_modules/@actions/github/lib/github.js");
let commentBody = '## Rsdoctor Bundle Diff Analysis\n\n';
- if (projectReports.length > 1) commentBody += `Found ${projectReports.length} project(s) in monorepo.\n\n`;
+ const firstReport = projectReports.find((r)=>r.current);
+ if (firstReport?.baselineUsedFallback && firstReport?.baselineLatestCommitHash) commentBody += `> ā ļø **Note:** The latest commit (\`${firstReport.baselineLatestCommitHash}\`) does not have baseline artifacts. Using commit \`${firstReport.baselineCommitHash}\` for baseline comparison instead. If this seems incorrect, please wait a few minutes and try rerunning the workflow.\n\n`;
+ const reportsWithCurrent = projectReports.filter((r)=>r.current);
+ if (reportsWithCurrent.length > 1) commentBody += `Found ${reportsWithCurrent.length} project(s) in monorepo.\n\n`;
+ if (reportsWithCurrent.length > 0) {
+ commentBody += '\n<details>\n<summary>📊 Quick Summary (Click to expand)</summary>\n\n';
+ commentBody += '| Project | Total Size | Change |\n';
+ commentBody += '|---------|------------|--------|\n';
+ for (const report of reportsWithCurrent){
+ if (!report.current) continue;
+ const currentSize = report.current.totalSize;
+ const baselineSize = report.baseline?.totalSize || 0;
+ const diff = report.baseline ? calculateDiff(currentSize, baselineSize) : {
+ value: '-',
+ emoji: ''
+ };
+ const sizeStr = formatBytes(currentSize);
+ commentBody += `| ${report.projectName} | ${sizeStr} | ${diff.emoji} ${diff.value} |\n`;
+ }
+ commentBody += '\n</details>\n\n';
+ }
+ if (reportsWithCurrent.length > 1) commentBody += '\n<details>\n<summary>📋 Detailed Reports (Click to expand)</summary>\n\n';
for (const report of projectReports)if (report.current) {
commentBody += generateProjectMarkdown(report.projectName, report.filePath, report.current, report.baseline || void 0, report.baselineCommitHash, report.baselinePRs);
if (report.diffHtmlArtifactId) {
@@ -96729,6 +96847,7 @@ var __webpack_exports__ = {};
commentBody += `\nš¦ **Download Diff Report**: [${report.projectName} Bundle Diff](${artifactDownloadLink})\n\n`;
}
}
+ if (reportsWithCurrent.length > 1) commentBody += '</details>\n\n';
commentBody += '*Generated by [Rsdoctor GitHub Action](https://rsdoctor.rs/guide/start/action)*';
try {
await githubService.updateOrCreateComment(context.payload.pull_request.number, commentBody);
diff --git a/src/__tests__/github.test.ts b/src/__tests__/github.test.ts
index c7f8f32..d15427d 100644
--- a/src/__tests__/github.test.ts
+++ b/src/__tests__/github.test.ts
@@ -39,8 +39,89 @@ describe('GitHub Service', () => {
},
});
- const commit = await githubService.getTargetBranchLatestCommit();
- expect(commit).toBe(mockCommitSha);
+ // Mock workflow runs check (no artifacts found)
+ nock('https://api.github.com')
+ .get('/repos/web-infra-dev/rsdoctor-action/actions/runs')
+ .query({ head_sha: mockCommitSha, status: 'completed', per_page: 30 })
+ .reply(200, {
+ workflow_runs: [],
+ });
+
+ // Mock get parent commit (no parent, reached beginning)
+ nock('https://api.github.com')
+ .get(`/repos/web-infra-dev/rsdoctor-action/commits/${mockCommitSha}`)
+ .reply(200, {
+ sha: mockCommitSha + '0123456789',
+ parents: [],
+ });
+
+ const result = await githubService.getTargetBranchLatestCommit();
+ expect(result).toHaveProperty('commitHash');
+ expect(result).toHaveProperty('usedFallbackCommit');
+ expect(result.commitHash).toBe(mockCommitSha);
+ expect(result.usedFallbackCommit).toBe(false);
+ });
+
+ it('should return object with fallback info when latest commit has no artifacts', async () => {
+ const mockCommitSha = 'abcdef1234';
+ const mockParentSha = 'parent1234';
+ nock('https://api.github.com')
+ .get('/repos/web-infra-dev/rsdoctor-action/branches/main')
+ .reply(200, {
+ commit: {
+ sha: mockCommitSha + '0123456789',
+ },
+ });
+
+ // Mock workflow runs check for latest commit (no artifacts)
+ nock('https://api.github.com')
+ .get('/repos/web-infra-dev/rsdoctor-action/actions/runs')
+ .query({ head_sha: mockCommitSha, status: 'completed', per_page: 30 })
+ .reply(200, {
+ workflow_runs: [],
+ });
+
+ // Mock get parent commit
+ nock('https://api.github.com')
+ .get(`/repos/web-infra-dev/rsdoctor-action/commits/${mockCommitSha}`)
+ .reply(200, {
+ sha: mockCommitSha + '0123456789',
+ parents: [
+ { sha: mockParentSha + '0123456789' },
+ ],
+ });
+
+ // Mock workflow runs check for parent commit (has artifacts)
+ nock('https://api.github.com')
+ .get('/repos/web-infra-dev/rsdoctor-action/actions/runs')
+ .query({ head_sha: mockParentSha, status: 'completed', per_page: 30 })
+ .reply(200, {
+ workflow_runs: [
+ {
+ id: 123,
+ name: 'CI',
+ status: 'completed',
+ conclusion: 'success',
+ },
+ ],
+ });
+
+ // Mock artifacts for parent commit
+ nock('https://api.github.com')
+ .get('/repos/web-infra-dev/rsdoctor-action/actions/runs/123/artifacts')
+ .reply(200, {
+ artifacts: [
+ { id: 1, name: 'test-artifact' },
+ ],
+ });
+
+ const result = await githubService.getTargetBranchLatestCommit();
+ expect(result).toHaveProperty('commitHash');
+ expect(result).toHaveProperty('usedFallbackCommit');
+ expect(result).toHaveProperty('latestCommitHash');
+ expect(result.commitHash).toBe(mockParentSha);
+ expect(result.usedFallbackCommit).toBe(true);
+ expect(result.latestCommitHash).toBe(mockCommitSha);
});
});
});
diff --git a/src/github.ts b/src/github.ts
index 7a26f15..508b8df 100644
--- a/src/github.ts
+++ b/src/github.ts
@@ -77,11 +77,64 @@ export class GitHubService {
return runsResponse.data;
}
- async getTargetBranchLatestCommit(): Promise<string> {
+ /**
+ * Check if a commit has any artifacts by checking its workflow runs
+ */
+ async hasArtifactsForCommit(commitHash: string): Promise<boolean> {
+ try {
+ const workflowRuns = await this.findAllWorkflowRunsByCommit(commitHash);
+
+ for (const workflowRun of workflowRuns) {
+ try {
+ const runArtifacts = await this.listArtifactsForWorkflowRun(workflowRun.id);
+ if (runArtifacts.artifacts && runArtifacts.artifacts.length > 0) {
+ return true;
+ }
+ } catch (error) {
+ // Continue checking other workflow runs
+ continue;
+ }
+ }
+
+ return false;
+ } catch (error) {
+ // If we can't check, assume no artifacts
+ return false;
+ }
+ }
+
+ /**
+ * Get parent commit hash
+ */
+ async getParentCommit(commitHash: string): Promise<string | null> {
+ const { owner, repo } = this.repository;
+
+ try {
+ const commitResponse = await this.octokit.rest.repos.getCommit({
+ owner,
+ repo,
+ ref: commitHash
+ });
+
+ if (commitResponse.data.parents && commitResponse.data.parents.length > 0) {
+ return commitResponse.data.parents[0].sha.substring(0, 10);
+ }
+
+ return null;
+ } catch (error) {
+ const apiError = error as ApiError;
+ console.warn(`ā ļø Failed to get parent commit for ${commitHash}: ${apiError.message}`);
+ return null;
+ }
+ }
+
+ async getTargetBranchLatestCommit(): Promise<{ commitHash: string; usedFallbackCommit: boolean; latestCommitHash?: string }> {
const targetBranch = this.getTargetBranch();
console.log(`š Attempting to get latest commit for target branch: ${targetBranch}`);
console.log(`š Repository: ${this.repository.owner}/${this.repository.repo}`);
+ let latestCommitHash: string | null = null;
+
try {
console.log(`š” Trying to get latest commit from GitHub API...`);
const { owner, repo } = this.repository;
@@ -94,9 +147,8 @@ export class GitHubService {
});
if (branchResponse.data && branchResponse.data.commit) {
- const commitHash = branchResponse.data.commit.sha.substring(0, 10);
- console.log(`✅ Found commit hash from GitHub API: ${commitHash}`);
- return commitHash;
+ latestCommitHash = branchResponse.data.commit.sha.substring(0, 10);
+ console.log(`✅ Found commit hash from GitHub API: ${latestCommitHash}`);
}
} catch (error) {
const apiError = error as ApiError;
@@ -114,9 +166,9 @@ export class GitHubService {
});
if (altResponse.data && altResponse.data.commit) {
- const commitHash = altResponse.data.commit.sha.substring(0, 10);
- console.log(`✅ Found commit hash from alternative branch ${altBranch}: ${commitHash}`);
- return commitHash;
+ latestCommitHash = altResponse.data.commit.sha.substring(0, 10);
+ console.log(`✅ Found commit hash from alternative branch ${altBranch}: ${latestCommitHash}`);
+ break;
}
} catch (error) {
const altError = error as ApiError;
@@ -126,59 +178,125 @@ export class GitHubService {
}
}
- console.log(`š Trying to get from workflow runs...`);
- try {
- const runs = await this.listWorkflowRuns({
- branch: targetBranch,
- status: 'completed',
- limit: 10
- });
+ if (!latestCommitHash) {
+ console.log(`š Trying to get from workflow runs...`);
+ try {
+ const runs = await this.listWorkflowRuns({
+ branch: targetBranch,
+ status: 'completed',
+ limit: 10
+ });
+
+ if (runs.workflow_runs && runs.workflow_runs.length > 0) {
+ console.log(`Found ${runs.workflow_runs.length} workflow runs for ${targetBranch}`);
+
+ const successfulRun = runs.workflow_runs.find((run: WorkflowRun) => run.conclusion === 'success');
+ if (successfulRun) {
+ latestCommitHash = successfulRun.head_sha.substring(0, 10);
+ console.log(`✅ Found successful workflow run for ${targetBranch}: ${latestCommitHash}`);
+ } else {
+ const latestRun = runs.workflow_runs[0] as WorkflowRun;
+ latestCommitHash = latestRun.head_sha.substring(0, 10);
+ console.log(`ā ļø No successful runs found, using latest workflow run for ${targetBranch}: ${latestCommitHash}`);
+ }
+ }
+ } catch (error) {
+ const workflowError = error as ApiError;
+ console.warn(`ā ļø Failed to get workflow runs: ${workflowError.message}`);
+ }
+ }
- if (runs.workflow_runs && runs.workflow_runs.length > 0) {
- console.log(`Found ${runs.workflow_runs.length} workflow runs for ${targetBranch}`);
+ if (!latestCommitHash) {
+ console.log(`š§ No workflow runs found for ${targetBranch}, trying to fetch from remote...`);
+ try {
+ console.log(`š„ Running: git fetch origin`);
+ execSync('git fetch origin', { encoding: 'utf8' });
- const successfulRun = runs.workflow_runs.find((run: WorkflowRun) => run.conclusion === 'success');
- if (successfulRun) {
- console.log(`✅ Found successful workflow run for ${targetBranch}: ${successfulRun.head_sha}`);
- return successfulRun.head_sha.substring(0, 10);
- }
+ console.log(`š„ Running: git rev-parse --short=10 origin/${targetBranch}`);
+ latestCommitHash = execSync(`git rev-parse --short=10 origin/${targetBranch}`, { encoding: 'utf8' }).trim();
+ console.log(`✅ Found commit hash from git: ${latestCommitHash}`);
+ } catch (gitError) {
+ console.warn(`ā Git fetch failed: ${gitError}`);
- const latestRun = runs.workflow_runs[0] as WorkflowRun;
- console.log(`ā ļø No successful runs found, using latest workflow run for ${targetBranch}: ${latestRun.head_sha}`);
- return latestRun.head_sha.substring(0, 10);
+ try {
+ console.log(`š„ Trying alternative: git ls-remote origin ${targetBranch}`);
+ const remoteRef = execSync(`git ls-remote origin ${targetBranch}`, { encoding: 'utf8' }).trim();
+ if (remoteRef) {
+ latestCommitHash = remoteRef.split('\t')[0].substring(0, 10);
+ console.log(`✅ Found commit hash from git ls-remote: ${latestCommitHash}`);
+ }
+ } catch (altError) {
+ console.warn(`ā Alternative git command failed: ${altError}`);
+ }
}
- } catch (error) {
- const workflowError = error as ApiError;
- console.warn(`ā ļø Failed to get workflow runs: ${workflowError.message}`);
}
- console.log(`š§ No workflow runs found for ${targetBranch}, trying to fetch from remote...`);
- try {
- console.log(`š„ Running: git fetch origin`);
- execSync('git fetch origin', { encoding: 'utf8' });
+ if (!latestCommitHash) {
+ console.error(`ā All methods to get target branch commit have failed`);
+ throw new Error(`Unable to get target branch (${targetBranch}) commit hash. Please ensure the branch exists and you have correct permissions.`);
+ }
+
+ // Check if the latest commit has artifacts, if not, look for previous commits
+ console.log(`š Checking if commit ${latestCommitHash} has baseline artifacts...`);
+ const hasArtifacts = await this.hasArtifactsForCommit(latestCommitHash);
+
+ if (hasArtifacts) {
+ console.log(`✅ Commit ${latestCommitHash} has baseline artifacts`);
+ return {
+ commitHash: latestCommitHash,
+ usedFallbackCommit: false
+ };
+ }
+
+ // Latest commit doesn't have artifacts, look for previous commits
+ console.log(`ā ļø Commit ${latestCommitHash} does not have baseline artifacts`);
+ console.log(`š Looking for previous commits with baseline artifacts...`);
+
+ let currentCommit = latestCommitHash;
+ let checkedCommits: string[] = [currentCommit];
+ const maxDepth = 5;
+
+ for (let depth = 0; depth < maxDepth; depth++) {
+ const parentCommit = await this.getParentCommit(currentCommit);
- console.log(`š„ Running: git rev-parse --short=10 origin/${targetBranch}`);
- const commitHash = execSync(`git rev-parse --short=10 origin/${targetBranch}`, { encoding: 'utf8' }).trim();
- console.log(`✅ Found commit hash from git: ${commitHash}`);
- return commitHash;
- } catch (gitError) {
- console.warn(`ā Git fetch failed: ${gitError}`);
+ if (!parentCommit) {
+ console.log(`ā ļø Reached the beginning of the branch, no more parent commits`);
+ break;
+ }
- try {
- console.log(`š„ Trying alternative: git ls-remote origin ${targetBranch}`);
- const remoteRef = execSync(`git ls-remote origin ${targetBranch}`, { encoding: 'utf8' }).trim();
- if (remoteRef) {
- const commitHash = remoteRef.split('\t')[0].substring(0, 10);
- console.log(`✅ Found commit hash from git ls-remote: ${commitHash}`);
- return commitHash;
- }
- } catch (altError) {
- console.warn(`ā Alternative git command failed: ${altError}`);
+ if (checkedCommits.includes(parentCommit)) {
+ console.log(`ā ļø Detected circular reference, stopping search`);
+ break;
}
+
+ checkedCommits.push(parentCommit);
+ console.log(`š Checking parent commit ${parentCommit}...`);
+
+ const parentHasArtifacts = await this.hasArtifactsForCommit(parentCommit);
+
+ if (parentHasArtifacts) {
+ console.log(`✅ Found commit ${parentCommit} with baseline artifacts`);
+ console.log(`\nā ļø Note: The latest commit (${latestCommitHash}) does not have baseline artifacts.`);
+ console.log(` Using commit ${parentCommit} for baseline comparison instead.`);
+ console.log(` If this seems incorrect, please wait a few minutes and try rerunning the workflow.`);
+ return {
+ commitHash: parentCommit,
+ usedFallbackCommit: true,
+ latestCommitHash: latestCommitHash
+ };
+ }
+
+ currentCommit = parentCommit;
}
-
- console.error(`ā All methods to get target branch commit have failed`);
- throw new Error(`Unable to get target branch (${targetBranch}) commit hash. Please ensure the branch exists and you have correct permissions.`);
+
+ // No commits with artifacts found
+ console.log(`\nā ļø No commits with baseline artifacts found in the last ${maxDepth} commits.`);
+ console.log(` Using latest commit ${latestCommitHash} anyway.`);
+ console.log(` Note: If baseline comparison fails, please wait a few minutes and try rerunning the workflow.`);
+ return {
+ commitHash: latestCommitHash,
+ usedFallbackCommit: false
+ };
} catch (error) {
console.error(`ā Failed to get target branch commit: ${error}`);
diff --git a/src/index.ts b/src/index.ts
index a2597a3..63de3ba 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -2,7 +2,7 @@ import { setFailed, getInput, summary } from '@actions/core';
import { uploadArtifact, hashPath } from './upload';
import { downloadArtifactByCommitHash } from './download';
import { GitHubService } from './github';
-import { loadSizeData, generateSizeReport, parseRsdoctorData, generateBundleAnalysisReport, BundleAnalysis, generateProjectMarkdown } from './report';
+import { loadSizeData, generateSizeReport, parseRsdoctorData, generateBundleAnalysisReport, BundleAnalysis, generateProjectMarkdown, formatBytes, calculateDiff } from './report';
import path from 'path';
import * as fs from 'fs';
import { execFile } from 'child_process';
@@ -84,6 +84,8 @@ interface ProjectReport {
baselinePRs?: Array<{ number: number; title: string; url: string }>;
diffHtmlPath?: string;
diffHtmlArtifactId?: number;
+ baselineUsedFallback?: boolean;
+ baselineLatestCommitHash?: string;
}
function extractProjectName(filePath: string): string {
@@ -122,6 +124,8 @@ async function processSingleFile(
fullPath: string,
currentCommitHash: string,
targetCommitHash: string | null,
+ baselineUsedFallback?: boolean,
+ baselineLatestCommitHash?: string,
): Promise<ProjectReport> {
const fileName = path.basename(fullPath);
const relativePath = path.relative(process.cwd(), fullPath);
@@ -162,6 +166,8 @@ async function processSingleFile(
if (baselineBundleAnalysis) {
report.baseline = baselineBundleAnalysis;
report.baselineCommitHash = targetCommitHash;
+ report.baselineUsedFallback = baselineUsedFallback;
+ report.baselineLatestCommitHash = baselineLatestCommitHash;
// Try to find associated PRs for the baseline commit
try {
@@ -276,11 +282,20 @@ async function processSingleFile(
console.log(`Current commit hash: ${currentCommitHash}`);
let targetCommitHash: string | null = null;
+ let baselineUsedFallback = false;
+ let baselineLatestCommitHash: string | undefined = undefined;
+
if (isPullRequestEvent()) {
try {
console.log('š Getting target branch commit hash...');
- targetCommitHash = await githubService.getTargetBranchLatestCommit();
+ const commitInfo = await githubService.getTargetBranchLatestCommit();
+ targetCommitHash = commitInfo.commitHash;
+ baselineUsedFallback = commitInfo.usedFallbackCommit;
+ baselineLatestCommitHash = commitInfo.latestCommitHash;
console.log(`✅ Target branch commit hash: ${targetCommitHash}`);
+ if (baselineUsedFallback && baselineLatestCommitHash) {
+ console.log(`ā ļø Using fallback commit: ${targetCommitHash} (latest: ${baselineLatestCommitHash})`);
+ }
} catch (error) {
console.error(`ā Failed to get target branch commit: ${error}`);
console.log('š No baseline data available for comparison');
@@ -351,7 +366,7 @@ async function processSingleFile(
console.log('š„ Detected pull request event - processing files');
for (const fullPath of matchedFiles) {
- const report = await processSingleFile(fullPath, currentCommitHash, targetCommitHash);
+ const report = await processSingleFile(fullPath, currentCommitHash, targetCommitHash, baselineUsedFallback, baselineLatestCommitHash);
projectReports.push(report);
}
@@ -359,11 +374,21 @@ async function processSingleFile(
if (projectReports.length === 1) {
const report = projectReports[0];
if (report.current) {
+ // Add fallback notice if applicable
+ if (report.baselineUsedFallback && report.baselineLatestCommitHash) {
+ await summary.addRaw(`> ā ļø **Note:** The latest commit (\`${report.baselineLatestCommitHash}\`) does not have baseline artifacts. Using commit \`${report.baselineCommitHash}\` for baseline comparison instead. If this seems incorrect, please wait a few minutes and try rerunning the workflow.\n\n`);
+ }
await generateBundleAnalysisReport(report.current, report.baseline || undefined, true, report.baselineCommitHash, report.baselinePRs);
}
} else {
await summary.addHeading('š¦ Monorepo Bundle Analysis', 2);
+ // Add fallback notice if applicable (check first report)
+ const firstReport = projectReports.find(r => r.current);
+ if (firstReport?.baselineUsedFallback && firstReport?.baselineLatestCommitHash) {
+ await summary.addRaw(`> ā ļø **Note:** The latest commit (\`${firstReport.baselineLatestCommitHash}\`) does not have baseline artifacts. Using commit \`${firstReport.baselineCommitHash}\` for baseline comparison instead. If this seems incorrect, please wait a few minutes and try rerunning the workflow.\n\n`);
+ }
+
for (const report of projectReports) {
if (!report.current) continue;
@@ -384,8 +409,39 @@ async function processSingleFile(
let commentBody = '## Rsdoctor Bundle Diff Analysis\n\n';
- if (projectReports.length > 1) {
- commentBody += `Found ${projectReports.length} project(s) in monorepo.\n\n`;
+ // Add fallback notice if applicable (check first report)
+ const firstReport = projectReports.find(r => r.current);
+ if (firstReport?.baselineUsedFallback && firstReport?.baselineLatestCommitHash) {
+ commentBody += `> ā ļø **Note:** The latest commit (\`${firstReport.baselineLatestCommitHash}\`) does not have baseline artifacts. Using commit \`${firstReport.baselineCommitHash}\` for baseline comparison instead. If this seems incorrect, please wait a few minutes and try rerunning the workflow.\n\n`;
+ }
+
+ // Generate summary (always visible)
+ const reportsWithCurrent = projectReports.filter(r => r.current);
+ if (reportsWithCurrent.length > 1) {
+ commentBody += `Found ${reportsWithCurrent.length} project(s) in monorepo.\n\n`;
+ }
+
+ // Generate summary table for quick overview
+ if (reportsWithCurrent.length > 0) {
+ commentBody += '\n<details>\n<summary>📊 Quick Summary (Click to expand)</summary>\n\n';
+ commentBody += '| Project | Total Size | Change |\n';
+ commentBody += '|---------|------------|--------|\n';
+
+ for (const report of reportsWithCurrent) {
+ if (!report.current) continue;
+ const currentSize = report.current.totalSize;
+ const baselineSize = report.baseline?.totalSize || 0;
+ const diff = report.baseline ? calculateDiff(currentSize, baselineSize) : { value: '-', emoji: '' };
+ const sizeStr = formatBytes(currentSize);
+ commentBody += `| ${report.projectName} | ${sizeStr} | ${diff.emoji} ${diff.value} |\n`;
+ }
+
+ commentBody += '\n</details>\n\n';
+ }
+
+ // Generate detailed reports (collapsed by default)
+ if (reportsWithCurrent.length > 1) {
+ commentBody += '\n<details>\n<summary>📋 Detailed Reports (Click to expand)</summary>\n\n';
}
for (const report of projectReports) {
@@ -400,6 +456,10 @@ async function processSingleFile(
}
}
+ if (reportsWithCurrent.length > 1) {
+ commentBody += '</details>\n\n';
+ }
+
commentBody += '*Generated by [Rsdoctor GitHub Action](https://rsdoctor.rs/guide/start/action)*';
try {
diff --git a/src/report.ts b/src/report.ts
index 480e32c..366cb0b 100644
--- a/src/report.ts
+++ b/src/report.ts
@@ -49,7 +49,7 @@ export interface BundleAnalysis {
}>;
}
-function formatBytes(bytes: number): string {
+export function formatBytes(bytes: number): string {
if (bytes === 0) return '0 B';
const k = 1024;
@@ -155,7 +155,7 @@ export function loadSizeData(filePath: string): SizeData | null {
}
}
-function calculateDiff(current: number, baseline: number): { value: string; emoji: string } {
+export function calculateDiff(current: number, baseline: number): { value: string; emoji: string } {
if (!baseline || baseline === 0 || isNaN(baseline)) {
return { value: 'N/A', emoji: 'ā' };
}