diff --git a/meta/classes/cve-check.bbclass b/meta/classes/cve-check.bbclass
index 75c5b92b961..c805a903b83 100644
--- a/meta/classes/cve-check.bbclass
+++ b/meta/classes/cve-check.bbclass
@@ -104,6 +104,7 @@ python do_cve_check () {
     if patched or unpatched:
         cve_data = get_cve_info(d, patched + unpatched)
         cve_write_data(d, patched, unpatched, whitelisted, cve_data)
+        convert_to_sarif(d)
     else:
         bb.note("No CVE database found, skipping CVE check")
 
@@ -403,3 +404,103 @@ def cve_write_data(d, patched, unpatched, whitelisted, cve_data):
     with open(d.getVar("CVE_CHECK_TMP_FILE"), "a") as f:
         f.write("%s" % write_string)
 
+
+
+def get_vulnerability(fh):
+    """Read one ten-line vulnerability record from the cve-check log.
+
+    Each record line has the form "NAME: value"; only the values are
+    returned, in file order.  Returns an empty list at end of file.
+    """
+    values = []
+    for _ in range(10):
+        try:
+            line = next(fh).strip()
+        except StopIteration:
+            break  # end of file inside a record
+        # maxsplit=1 keeps ':' occurring inside values (e.g. URLs) intact
+        parts = line.split(': ', 1)
+        if len(parts) == 2:
+            values.append(parts[1])
+    return values
+
+
+def get_criticality_from_cvss_v3(score):
+    """Map a CVSS v3 base score to a SARIF result level."""
+    if score == 0.0:
+        return 'none'
+    elif score < 4:
+        return 'note'
+    elif score < 7:
+        return 'warning'
+    else:
+        return 'error'
+
+
+def convert_to_sarif(d):
+    """Convert the plain-text cve-check log into a SARIF 2.1.0 report."""
+    import os
+    import json
+
+    cve_file = d.getVar("CVE_CHECK_LOG")
+    if not os.path.exists(cve_file):
+        return
+    pn = d.getVar("CVE_PRODUCT")
+
+    fields = ['LAYER', 'PACKAGE NAME', 'PACKAGE VERSION', 'ruleId',
+              'CVE STATUS', 'message', 'CVSS v2 BASE SCORE',
+              'CVSS v3 BASE SCORE', 'level', 'help']
+
+    results = []
+    rules = []
+    with open(cve_file) as fh:
+        while True:
+            values = get_vulnerability(fh)
+            if not values:
+                break  # no more vulnerabilities in the log
+            if len(values) < len(fields):
+                break  # truncated record at end of file
+            # replace the VECTOR field with a SARIF level derived
+            # from the CVSS v3 base score
+            values[8] = get_criticality_from_cvss_v3(float(values[7]))
+            record = dict(zip(fields, values))
+
+            # rule properties carry the raw data not shown in results
+            properties = {k: v for k, v in record.items()
+                          if k not in ('ruleId', 'message', 'help', 'level')}
+            rules.append({
+                'id': record['ruleId'],
+                'help': {'text': record['help']},
+                'properties': properties,
+            })
+            results.append({
+                'ruleId': record['ruleId'],
+                'message': {'text': record['message']},
+                'level': record['level'],
+            })
+            try:
+                next(fh)  # drop the blank line between records
+            except StopIteration:
+                break
+
+    sarif = {
+        "$schema": "https://schemastore.azurewebsites.net/schemas/json/sarif-2.1.0.json",
+        "version": "2.1.0",
+        "runs": [{
+            "tool": {
+                "driver": {
+                    "name": "do_cve-check",
+                    "fullName": "Yocto Vulnerability Scanner",
+                    "informationUri": "https://pvs-studio.com/en/docs/manual/0038/",
+                    "version": "1.0.1",
+                    "rules": rules,
+                },
+            },
+            "results": results,
+        }],
+    }
+    # build the whole document and let json.dump serialize it, instead of
+    # hand-writing JSON fragments, so the output is always well-formed
+    os.makedirs("cve-output", exist_ok=True)
+    with open("cve-output/%s.sarif" % pn, "w") as output_file:
+        json.dump(sarif, output_file, indent=4)