Skip to content

Commit 41b4482

Browse files
authored
Merge pull request #1557 from Altinity/update-cve-sorting
Antalya-26.1 - Update CVE sorting
2 parents 60fe13a + 4270240 commit 41b4482

2 files changed

Lines changed: 26 additions & 10 deletions

File tree

.github/actions/create_workflow_report/ci_run_report.html.jinja

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -226,6 +226,8 @@
226226
element.className = element.className.replace(regex_dir, '') + dir
227227
}
228228
function getValue(element) {
229+
var childWithSort = element.querySelector('[data-sort]')
230+
if (childWithSort) return childWithSort.getAttribute('data-sort')
229231
return (
230232
(alt_sort && element.getAttribute('data-sort-alt')) ||
231233
element.getAttribute('data-sort') || element.innerText

.github/actions/create_workflow_report/create_workflow_report.py

Lines changed: 24 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,9 @@
2626
GITHUB_REPO = "Altinity/ClickHouse"
2727
GITHUB_TOKEN = os.getenv("GITHUB_TOKEN") or os.getenv("GH_TOKEN")
2828

29+
CVE_SEVERITY_ORDER = {"critical": 1, "high": 2, "medium": 3, "low": 4, "negligible": 5}
30+
31+
2932
def get_commit_statuses(sha: str) -> pd.DataFrame:
3033
"""
3134
Fetch commit statuses for a given SHA and return as a pandas DataFrame.
@@ -498,7 +501,9 @@ def format_prefix(pr_number, commit_sha, branch):
498501
Bucket=S3_BUCKET, Prefix=s3_prefix, Delimiter="/"
499502
)
500503
grype_result_dirs.extend(
501-
content["Prefix"] for content in response.get("CommonPrefixes", [])
504+
content["Prefix"]
505+
for content in response.get("CommonPrefixes", [])
506+
if isinstance(content, dict) and content.get("Prefix")
502507
)
503508
except Exception as e:
504509
print(f"Error listing S3 objects at {s3_prefix}: {e}")
@@ -536,12 +541,13 @@ def format_prefix(pr_number, commit_sha, branch):
536541
return pd.DataFrame()
537542

538543
df = pd.DataFrame(rows).drop_duplicates()
539-
df = df.sort_values(
540-
by="severity",
541-
key=lambda col: col.str.lower().map(
542-
{"critical": 1, "high": 2, "medium": 3, "low": 4, "negligible": 5}
543-
),
544-
)
544+
545+
def _cve_sort_key(col):
546+
if col.name == "severity":
547+
return col.str.lower().map(CVE_SEVERITY_ORDER)
548+
return col
549+
550+
df = df.sort_values(by=["severity", "docker_image"], key=_cve_sort_key)
545551
return df
546552

547553

@@ -589,6 +595,9 @@ def format_results_as_html_table(results) -> str:
589595
"Identifier": lambda i: url_to_html_link(
590596
"https://nvd.nist.gov/vuln/detail/" + i
591597
),
598+
"Severity": lambda s: (
599+
f'<span data-sort="{CVE_SEVERITY_ORDER.get(str(s).lower(), 6)}">{s}</span>'
600+
),
592601
},
593602
escape=False,
594603
border=0,
@@ -772,11 +781,16 @@ def create_workflow_report(
772781
"pr_new_fails": [],
773782
"checks_errors": get_checks_errors(db_client, commit_sha, branch_name),
774783
"regression_fails": get_regression_fails(db_client, actions_run_url),
775-
"docker_images_cves": (
776-
[] if not check_cves else get_cves(pr_number, commit_sha, branch_name)
777-
),
784+
"docker_images_cves": [],
778785
}
779786

787+
try:
788+
fail_results["docker_images_cves"] = (
789+
[] if not check_cves else get_cves(pr_number, commit_sha, branch_name)
790+
)
791+
except Exception as e:
792+
print(f"Error in get_cves: {e}")
793+
780794
# get_cves returns ... in the case where no Grype result files were found.
781795
# This might occur when run in preview mode.
782796
cves_not_checked = not check_cves or fail_results["docker_images_cves"] is ...

0 commit comments

Comments (0)