|
26 | 26 | GITHUB_REPO = "Altinity/ClickHouse" |
27 | 27 | GITHUB_TOKEN = os.getenv("GITHUB_TOKEN") or os.getenv("GH_TOKEN") |
28 | 28 |
|
| 29 | +CVE_SEVERITY_ORDER = {"critical": 1, "high": 2, "medium": 3, "low": 4, "negligible": 5} |
| 30 | + |
| 31 | + |
29 | 32 | def get_commit_statuses(sha: str) -> pd.DataFrame: |
30 | 33 | """ |
31 | 34 | Fetch commit statuses for a given SHA and return as a pandas DataFrame. |
@@ -498,7 +501,9 @@ def format_prefix(pr_number, commit_sha, branch): |
498 | 501 | Bucket=S3_BUCKET, Prefix=s3_prefix, Delimiter="/" |
499 | 502 | ) |
500 | 503 | grype_result_dirs.extend( |
501 | | - content["Prefix"] for content in response.get("CommonPrefixes", []) |
| 504 | + content["Prefix"] |
| 505 | + for content in response.get("CommonPrefixes", []) |
| 506 | + if isinstance(content, dict) and content.get("Prefix") |
502 | 507 | ) |
503 | 508 | except Exception as e: |
504 | 509 | print(f"Error listing S3 objects at {s3_prefix}: {e}") |
@@ -536,12 +541,13 @@ def format_prefix(pr_number, commit_sha, branch): |
536 | 541 | return pd.DataFrame() |
537 | 542 |
|
538 | 543 | df = pd.DataFrame(rows).drop_duplicates() |
539 | | - df = df.sort_values( |
540 | | - by="severity", |
541 | | - key=lambda col: col.str.lower().map( |
542 | | - {"critical": 1, "high": 2, "medium": 3, "low": 4, "negligible": 5} |
543 | | - ), |
544 | | - ) |
| 544 | + |
| 545 | + def _cve_sort_key(col): |
| 546 | + if col.name == "severity": |
| 547 | + return col.str.lower().map(CVE_SEVERITY_ORDER) |
| 548 | + return col |
| 549 | + |
| 550 | + df = df.sort_values(by=["severity", "docker_image"], key=_cve_sort_key) |
545 | 551 | return df |
546 | 552 |
|
547 | 553 |
|
@@ -589,6 +595,9 @@ def format_results_as_html_table(results) -> str: |
589 | 595 | "Identifier": lambda i: url_to_html_link( |
590 | 596 | "https://nvd.nist.gov/vuln/detail/" + i |
591 | 597 | ), |
| 598 | + "Severity": lambda s: ( |
| 599 | + f'<span data-sort="{CVE_SEVERITY_ORDER.get(str(s).lower(), 6)}">{s}</span>' |
| 600 | + ), |
592 | 601 | }, |
593 | 602 | escape=False, |
594 | 603 | border=0, |
@@ -772,11 +781,16 @@ def create_workflow_report( |
772 | 781 | "pr_new_fails": [], |
773 | 782 | "checks_errors": get_checks_errors(db_client, commit_sha, branch_name), |
774 | 783 | "regression_fails": get_regression_fails(db_client, actions_run_url), |
775 | | - "docker_images_cves": ( |
776 | | - [] if not check_cves else get_cves(pr_number, commit_sha, branch_name) |
777 | | - ), |
| 784 | + "docker_images_cves": [], |
778 | 785 | } |
779 | 786 |
|
| 787 | + try: |
| 788 | + fail_results["docker_images_cves"] = ( |
| 789 | + [] if not check_cves else get_cves(pr_number, commit_sha, branch_name) |
| 790 | + ) |
| 791 | + except Exception as e: |
| 792 | + print(f"Error in get_cves: {e}") |
| 793 | + |
780 | 794 | # get_cves returns the Ellipsis sentinel (`...`) when no Grype result files were found.
781 | 795 | # This might occur when run in preview mode.
782 | 796 | cves_not_checked = not check_cves or fail_results["docker_images_cves"] is ... |
|
0 commit comments