6 | 6 |
7 | 7 | from datetime import datetime |
8 | 8 | from typing import Any |
| 9 | +from urllib.parse import urlparse |
9 | 10 |
| 11 | +from .constants import FALLBACK_LOG_LINES |
10 | 12 | from .fetcher import GitHubCIFetcher |
11 | 13 | from .log_parser import LogParser |
12 | 14 |
@@ -91,8 +93,13 @@ def get_job_details_for_status( |
91 | 93 | jobs = fetcher.get_workflow_jobs(owner, repo_name, run_id) |
92 | 94 | _add_failed_steps_to_job_details(fetcher, jobs, job_details) |
93 | 95 |
| 96 | + except (ValueError, KeyError, TypeError): |
| 97 | + # Return basic job details if we can't parse the step information |
| 98 | + # This can happen with an unexpected API response format or missing data
| 99 | + pass |
94 | 100 | except Exception: |
95 | | - # Return basic job details even if we can't get step information |
| 101 | + # Don't crash on unexpected errors; return basic job details
| 102 | + # TODO: Add proper logging here |
96 | 103 | pass |
97 | 104 |
98 | 105 | return job_details |
@@ -248,12 +255,35 @@ def retry_failed_workflows( |
248 | 255 |
249 | 256 |
250 | 257 | def _extract_run_id_from_url(html_url: str) -> int | None: |
251 | | - """Extract run ID from GitHub Actions URL.""" |
252 | | - if "actions/runs" not in html_url: |
| 258 | + """Extract run ID from GitHub Actions URL using proper URL parsing. |
| 259 | +
| 260 | + Args: |
| 261 | + html_url: GitHub Actions URL like 'https://github.com/owner/repo/actions/runs/123456/jobs/789' |
|
| 263 | + Returns: |
| 264 | + The run ID (123456) or None if the URL is invalid or doesn't contain a run ID
| 265 | + """ |
| 266 | + if not html_url or "actions/runs" not in html_url: |
253 | 267 | return None |
| 268 | + |
254 | 269 | try: |
255 | | - return int(html_url.split("/runs/")[1].split("/")[0]) |
256 | | - except (IndexError, ValueError): |
| 270 | + parsed_url = urlparse(html_url) |
| 271 | + if not parsed_url.path: |
| 272 | + return None |
| 273 | +
| 274 | + # Split path into components and find 'runs' segment |
| 275 | + path_parts = [part for part in parsed_url.path.split("/") if part] |
| 276 | +
| 277 | + # Look for a 'runs' path segment followed by a numeric run ID
| 278 | + for i, part in enumerate(path_parts): |
| 279 | + if part == "runs" and i + 1 < len(path_parts): |
| 280 | + run_id_str = path_parts[i + 1] |
| 281 | + if run_id_str.isdigit(): |
| 282 | + return int(run_id_str) |
| 283 | + break |
| 284 | +
| 285 | + return None |
| 286 | + except (ValueError, AttributeError): |
257 | 287 | return None |
258 | 288 |
259 | 289 |
@@ -284,7 +314,12 @@ def _calculate_step_duration(step: dict[str, Any]) -> str: |
284 | 314 | start = datetime.fromisoformat(step["started_at"].replace("Z", "+00:00")) |
285 | 315 | end = datetime.fromisoformat(step["completed_at"].replace("Z", "+00:00")) |
286 | 316 | return f"{(end - start).total_seconds():.1f}s" |
| 317 | + except (ValueError, TypeError): |
| 318 | + # Handle invalid timestamp format or missing data |
| 319 | + return "Unknown" |
287 | 320 | except Exception: |
| 321 | + # Handle other unexpected errors in datetime calculation |
| 322 | + # TODO: Add proper logging here |
288 | 323 | return "Unknown" |
289 | 324 |
290 | 325 |
@@ -462,12 +497,19 @@ def _process_check_run_for_logs( |
462 | 497 | fetcher, owner, repo_name, jobs, name, show_groups, step_filter, group_filter |
463 | 498 | ) |
464 | 499 |
| 500 | + except (ValueError, KeyError, TypeError) as e: |
| 501 | + return { |
| 502 | + "name": name, |
| 503 | + "html_url": html_url, |
| 504 | + "step_logs": {}, |
| 505 | + "error": f"Failed to parse job data: {e}", |
| 506 | + } |
465 | 507 | except Exception as e: |
466 | 508 | return { |
467 | 509 | "name": name, |
468 | 510 | "html_url": html_url, |
469 | 511 | "step_logs": {}, |
470 | | - "error": f"Error processing job details: {e}", |
| 512 | + "error": f"Unexpected error processing job details: {e}", |
471 | 513 | } |
472 | 514 |
473 | 515 |
@@ -514,7 +556,9 @@ def _extract_step_logs_from_jobs( |
514 | 556 | else: |
515 | 557 | # Fallback to last few lines |
516 | 558 | step_lines = step_log.split("\n") |
517 | | - clean_log = "\n".join(line for line in step_lines[-10:] if line.strip()) |
| 559 | + clean_log = "\n".join( |
| 560 | + line for line in step_lines[-FALLBACK_LOG_LINES:] if line.strip() |
| 561 | + ) |
518 | 562 | filtered_step_logs[step_name] = _remove_timestamps(clean_log) |
519 | 563 |
520 | 564 | return { |
@@ -624,5 +668,10 @@ def _calculate_workflow_duration(run: dict[str, Any]) -> str: |
624 | 668 | end = datetime.now(start.tzinfo) |
625 | 669 | duration = end - start |
626 | 670 | return f"{int(duration.total_seconds())}s" |
| 671 | + except (ValueError, TypeError): |
| 672 | + # Handle invalid timestamp format or missing timezone info |
| 673 | + return "unknown" |
627 | 674 | except Exception: |
| 675 | + # Handle other unexpected errors in datetime calculation |
| 676 | + # TODO: Add proper logging here |
628 | 677 | return "unknown" |
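
For a quick sanity check on the reworked run-ID extraction, here is a minimal standalone sketch of the same urlparse-based logic; the helper name and the sample URLs are illustrative only, not part of the change:

from urllib.parse import urlparse


def extract_run_id(html_url: str) -> int | None:
    # Mirrors the patched helper: return the numeric run ID from a GitHub
    # Actions URL, or None when the URL does not contain one.
    if not html_url or "actions/runs" not in html_url:
        return None
    path_parts = [part for part in urlparse(html_url).path.split("/") if part]
    for i, part in enumerate(path_parts):
        if part == "runs" and i + 1 < len(path_parts):
            return int(path_parts[i + 1]) if path_parts[i + 1].isdigit() else None
    return None


print(extract_run_id("https://github.com/o/r/actions/runs/123456/jobs/789"))  # 123456
print(extract_run_id("https://github.com/o/r/actions/runs/123456?pr=1"))      # 123456
print(extract_run_id("https://github.com/o/r/actions/runs/not-a-number"))     # None
print(extract_run_id("https://github.com/o/r/pull/42"))                       # None

One behavioral difference from the old split-on-"/runs/" approach: because urlparse keeps the query string out of the path, a URL like the second example now resolves to its run ID instead of falling through to None.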
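
The FALLBACK_LOG_LINES change replaces the hard-coded 10 with a named constant from .constants; a rough sketch of the fallback trimming, assuming the constant keeps the old value:

FALLBACK_LOG_LINES = 10  # assumed value; the real one lives in .constants


def tail_of_log(step_log: str) -> str:
    # Keep only the last FALLBACK_LOG_LINES lines, dropping blank ones,
    # as the fallback branch in _extract_step_logs_from_jobs does.
    step_lines = step_log.split("\n")
    return "\n".join(line for line in step_lines[-FALLBACK_LOG_LINES:] if line.strip())


print(tail_of_log("\n".join(f"line {i}" for i in range(25))))  # prints lines 15-24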