Commit 2ffb18d

Backwards compatibility

1 parent 84b0706 commit 2ffb18d
File tree

3 files changed: +300 −121 lines changed

dojo/tools/github_vulnerability/parser.py

Lines changed: 177 additions & 121 deletions
@@ -17,128 +17,184 @@ def get_description_for_scan_types(self, scan_type):
         return "Import vulnerabilities from Github API (GraphQL Query)"
 
     def get_findings(self, filename, test):
-        # SCA: GraphQL vulnerabilityAlerts flow only
         data = json.load(filename)
-        if not isinstance(data, dict) or "data" not in data:
-            error_msg = (
-                "Invalid report format, expected a GitHub RepositoryVulnerabilityAlert GraphQL query response. "
-                "If you're trying to upload code scanning results, please use the Github SAST scan type."
-            )
-            raise ValueError(error_msg)
-
-        alerts = self._search_vulnerability_alerts(data.get("data"))
-        if not alerts:
-            error_msg = "Invalid report, no 'vulnerabilityAlerts' node found"
-            raise ValueError(error_msg)
-
-        repo = data.get("data").get("repository", {})
-        repo_url = repo.get("url")
-
-        dupes = {}
-        for alert in alerts.get("nodes", []):
-            vuln = alert.get("securityVulnerability", {})
-            advisory = vuln.get("advisory", {})
-            summary = advisory.get("summary", "")
-            desc = advisory.get("description", "")
-
-            pr_link = None
-            dependabot_update = alert.get("dependabotUpdate", {})
-            if dependabot_update:
-                pr = dependabot_update.get("pullRequest", {})
-                if pr:
-                    pr_link = pr.get("permalink")
-                    desc = f"Fix PR: [{pr_link}]({pr_link})\n" + desc
-
-            alert_num = alert.get("number")
-            if alert_num and repo_url:
-                alert_link = f"{repo_url}/security/dependabot/{alert_num}"
-                desc = f"Repo Alert: [{alert_link}]({alert_link})\n" + desc
-
-            finding = Finding(
-                title=summary,
-                test=test,
-                description=desc,
-                severity=self._convert_security(vuln.get("severity", "MODERATE")),
-                static_finding=True,
-                dynamic_finding=False,
-                unique_id_from_tool=alert.get("id"),
-            )
-
-            if alert_num and repo_url:
-                finding.url = alert_link
-
-            cwes = advisory.get("cwes", {}).get("nodes", [])
-            if cwes:
-                cwe_id = cwes[0].get("cweId", "")[4:]
-                if cwe_id.isdigit():
-                    finding.cwe = int(cwe_id)
-
-            if alert.get("vulnerableManifestPath"):
-                finding.file_path = alert.get("vulnerableManifestPath")
-            req = alert.get("vulnerableRequirements", "")
-            if req.startswith("= "):
-                finding.component_version = req[2:]
-            elif req:
-                finding.component_version = req
-            pkg = vuln.get("package", {})
-            finding.component_name = pkg.get("name")
-
-            if alert.get("createdAt"):
-                finding.date = dateutil.parser.parse(alert.get("createdAt"))
-            if alert.get("state") in {"FIXED", "DISMISSED"}:
-                finding.active = False
-                finding.is_mitigated = True
-
-            ref_urls = [r.get("url") for r in advisory.get("references", []) if r.get("url")]
-            if alert_num and repo_url:
-                ref_urls.append(alert_link)
-            if pr_link:
-                ref_urls.append(pr_link)
-            if ref_urls:
-                finding.references = "\r\n".join(ref_urls)
-
-            ids = [i.get("value") for i in advisory.get("identifiers", []) if i.get("value")]
-            if ids:
-                for identifier in ids:
-                    if identifier.startswith("CVE-"):
-                        finding.cve = identifier
-                    elif identifier.startswith("GHSA-"):
-                        finding.vuln_id_from_tool = identifier
-                if not finding.vuln_id_from_tool:
-                    finding.vuln_id_from_tool = ids[0]
-                finding.unsaved_vulnerability_ids = ids
-
-            # cvss is deprecated, so we favor cvssSeverities if it exists
-            for key in ("cvssSeverities", "cvss"):
-                cvss = advisory.get(key, {})
-                if key == "cvssSeverities" and cvss:
-                    cvss = cvss.get("cvssV3", {})
-                if cvss:
-                    score = cvss.get("score")
-                    if score is not None:
-                        finding.cvssv3_score = score
-                    vec = cvss.get("vectorString")
-                    if vec:
-                        parsed = cvss_parser.parse_cvss_from_text(vec)
-                        if parsed:
-                            finding.cvssv3 = parsed[0].clean_vector()
-                    break
-
-            epss = advisory.get("epss", {})
-            percentage = epss.get("percentage")
-            percentile = epss.get("percentile")
-            if percentage is not None:
-                finding.epss_score = percentage
-            if percentile is not None:
-                finding.epss_percentile = percentile
-
-            dupe_key = finding.unique_id_from_tool
-            if dupe_key in dupes:
-                dupes[dupe_key].nb_occurences += 1
-            else:
-                dupes[dupe_key] = finding
-
-        return list(dupes.values())
+
+        if isinstance(data, dict):
+            if "data" not in data:
+                error_msg = (
+                    "Invalid report format, expected a GitHub RepositoryVulnerabilityAlert GraphQL query response."
+                )
+                raise ValueError(error_msg)
+
+            alerts = self._search_vulnerability_alerts(data.get("data"))
+            if not alerts:
+                error_msg = "Invalid report, no 'vulnerabilityAlerts' node found"
+                raise ValueError(error_msg)
+
+            repo = data.get("data").get("repository", {})
+            repo_url = repo.get("url")
+
+            dupes = {}
+            for alert in alerts.get("nodes", []):
+                vuln = alert.get("securityVulnerability", {})
+                advisory = vuln.get("advisory", {})
+                summary = advisory.get("summary", "")
+                desc = advisory.get("description", "")
+
+                pr_link = None
+                dependabot_update = alert.get("dependabotUpdate", {})
+                if dependabot_update:
+                    pr = dependabot_update.get("pullRequest", {})
+                    if pr:
+                        pr_link = pr.get("permalink")
+                        desc = f"Fix PR: [{pr_link}]({pr_link})\n" + desc
+
+                alert_num = alert.get("number")
+                if alert_num and repo_url:
+                    alert_link = f"{repo_url}/security/dependabot/{alert_num}"
+                    desc = f"Repo Alert: [{alert_link}]({alert_link})\n" + desc
+
+                finding = Finding(
+                    title=summary,
+                    test=test,
+                    description=desc,
+                    severity=self._convert_security(vuln.get("severity", "MODERATE")),
+                    static_finding=True,
+                    dynamic_finding=False,
+                    unique_id_from_tool=alert.get("id"),
+                )
+
+                if alert_num and repo_url:
+                    finding.url = alert_link
+
+                cwes = advisory.get("cwes", {}).get("nodes", [])
+                if cwes:
+                    cwe_id = cwes[0].get("cweId", "")[4:]
+                    if cwe_id.isdigit():
+                        finding.cwe = int(cwe_id)
+
+                if alert.get("vulnerableManifestPath"):
+                    finding.file_path = alert.get("vulnerableManifestPath")
+                req = alert.get("vulnerableRequirements", "")
+                if req.startswith("= "):
+                    finding.component_version = req[2:]
+                elif req:
+                    finding.component_version = req
+                pkg = vuln.get("package", {})
+                finding.component_name = pkg.get("name")
+
+                if alert.get("createdAt"):
+                    finding.date = dateutil.parser.parse(alert.get("createdAt"))
+                if alert.get("state") in {"FIXED", "DISMISSED"}:
+                    finding.active = False
+                    finding.is_mitigated = True
+
+                ref_urls = [r.get("url") for r in advisory.get("references", []) if r.get("url")]
+                if alert_num and repo_url:
+                    ref_urls.append(alert_link)
+                if pr_link:
+                    ref_urls.append(pr_link)
+                if ref_urls:
+                    finding.references = "\r\n".join(ref_urls)
+
+                ids = [i.get("value") for i in advisory.get("identifiers", []) if i.get("value")]
+                if ids:
+                    for identifier in ids:
+                        if identifier.startswith("CVE-"):
+                            finding.cve = identifier
+                        elif identifier.startswith("GHSA-"):
+                            finding.vuln_id_from_tool = identifier
+                    if not finding.vuln_id_from_tool:
+                        finding.vuln_id_from_tool = ids[0]
+                    finding.unsaved_vulnerability_ids = ids
+
+                # cvss is deprecated, so we favor cvssSeverities if it exists
+                for key in ("cvssSeverities", "cvss"):
+                    cvss = advisory.get(key, {})
+                    if key == "cvssSeverities" and cvss:
+                        cvss = cvss.get("cvssV3", {})
+                    if cvss:
+                        score = cvss.get("score")
+                        if score is not None:
+                            finding.cvssv3_score = score
+                        vec = cvss.get("vectorString")
+                        if vec:
+                            parsed = cvss_parser.parse_cvss_from_text(vec)
+                            if parsed:
+                                finding.cvssv3 = parsed[0].clean_vector()
+                        break
+
+                epss = advisory.get("epss", {})
+                percentage = epss.get("percentage")
+                percentile = epss.get("percentile")
+                if percentage is not None:
+                    finding.epss_score = percentage
+                if percentile is not None:
+                    finding.epss_percentile = percentile
+
+                dupe_key = finding.unique_id_from_tool
+                if dupe_key in dupes:
+                    dupes[dupe_key].nb_occurences += 1
+                else:
+                    dupes[dupe_key] = finding
+
+            return list(dupes.values())
+
+        if isinstance(data, list):
+            findings = []
+            for vuln in data:
+                url = vuln["url"]
+                html_url = vuln["html_url"]
+                active = vuln["state"] == "open"
+                ruleid = vuln["rule"]["id"]
+                ruleseverity = vuln["rule"]["severity"]
+                ruledescription = vuln["rule"]["description"]
+                rulename = vuln["rule"]["name"]
+                ruletags = vuln["rule"]["tags"]
+                severity = vuln["rule"]["security_severity_level"]
+                most_recent_instanceref = vuln["most_recent_instance"]["ref"]
+                most_recent_instanceanalysis_key = vuln["most_recent_instance"]["analysis_key"]
+                most_recent_instanceenvironment = vuln["most_recent_instance"]["environment"]
+                most_recent_instancecategory = vuln["most_recent_instance"]["category"]
+                most_recent_instancestate = vuln["most_recent_instance"]["state"]
+                most_recent_instancecommit_sha = vuln["most_recent_instance"]["commit_sha"]
+                most_recent_instancemessage = vuln["most_recent_instance"]["message"]["text"]
+                location = vuln["most_recent_instance"]["location"]
+                instancesurl = vuln["instances_url"]
+                description = ruledescription + "\n"
+                description += "**url:** " + url + "\n"
+                description += "**html_url:** " + html_url + "\n"
+                description += "**ruleid:** " + ruleid + "\n"
+                description += "**ruleseverity:** " + ruleseverity + "\n"
+                description += "**ruledescription:** " + ruledescription + "\n"
+                description += "**rulename:** " + rulename + "\n"
+                description += "**ruletags:** " + str(ruletags) + "\n"
+                description += "**most_recent_instanceref:** " + most_recent_instanceref + "\n"
+                description += "**most_recent_instanceanalysis_key:** " + most_recent_instanceanalysis_key + "\n"
+                description += "**most_recent_instanceenvironment:** " + most_recent_instanceenvironment + "\n"
+                description += "**most_recent_instancecategory:** " + most_recent_instancecategory + "\n"
+                description += "**most_recent_instancestate:** " + most_recent_instancestate + "\n"
+                description += "**most_recent_instancecommit_sha:** " + most_recent_instancecommit_sha + "\n"
+                description += "**most_recent_instancemessage:** " + most_recent_instancemessage + "\n"
+                description += "**location:** " + str(location) + "\n"
+                description += "**instancesurl:** " + instancesurl + "\n"
+                uniqueid = ruleid + url + most_recent_instanceanalysis_key + str(location)
+                finding = Finding(
+                    title=ruleid,
+                    test=test,
+                    description=description,
+                    severity=severity.capitalize(),
+                    active=active,
+                    static_finding=True,
+                    dynamic_finding=False,
+                    unique_id_from_tool=uniqueid,
+                )
+                findings.append(finding)
+            return findings
+        error_msg = (
+            "Invalid report format, expected a GitHub RepositoryVulnerabilityAlert GraphQL query response."
+        )
+        raise TypeError(error_msg)
 
     def _search_vulnerability_alerts(self, data):
         if isinstance(data, dict):
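
For reference, a minimal sketch of the two report shapes the updated get_findings now accepts: a dict routes into the GraphQL vulnerabilityAlerts (dependabot/SCA) flow, and a list routes into the code-scanning alert flow restored by this commit. The field names below are the ones read in the diff above; the concrete values, URLs, and the nesting of vulnerabilityAlerts under repository are illustrative assumptions, and the snippet only mirrors the top-level dispatch rather than invoking the DefectDojo parser itself.

```python
import json
from io import StringIO

# Hypothetical GraphQL (dependabot/SCA) payload: a dict with a top-level "data" key,
# as returned by a RepositoryVulnerabilityAlert GraphQL query. Values are made up.
graphql_report = {
    "data": {
        "repository": {
            "url": "https://github.com/example/repo",
            "vulnerabilityAlerts": {
                "nodes": [
                    {
                        "id": "RVA_example",           # hypothetical alert id
                        "number": 1,
                        "state": "OPEN",
                        "securityVulnerability": {
                            "severity": "HIGH",
                            "package": {"name": "example-package"},
                            "advisory": {"summary": "Example advisory", "description": "..."},
                        },
                    },
                ],
            },
        },
    },
}

# Hypothetical code-scanning (SAST) payload: a list of alert objects; only the
# fields the parser reads in the list branch are shown, with made-up values.
code_scanning_report = [
    {
        "url": "https://api.github.com/repos/example/repo/code-scanning/alerts/1",
        "html_url": "https://github.com/example/repo/security/code-scanning/1",
        "state": "open",
        "instances_url": "https://api.github.com/repos/example/repo/code-scanning/alerts/1/instances",
        "rule": {
            "id": "js/example-rule",
            "severity": "warning",
            "description": "Example rule",
            "name": "example-rule",
            "tags": ["security"],
            "security_severity_level": "high",
        },
        "most_recent_instance": {
            "ref": "refs/heads/main",
            "analysis_key": ".github/workflows/codeql.yml:analyze",
            "environment": "{}",
            "category": ".github/workflows/codeql.yml:analyze",
            "state": "open",
            "commit_sha": "deadbeef",
            "message": {"text": "Example finding"},
            "location": {"path": "src/app.js", "start_line": 1},
        },
    },
]

# The same top-level dispatch the updated get_findings performs on json.load(filename).
for report in (graphql_report, code_scanning_report):
    data = json.load(StringIO(json.dumps(report)))
    if isinstance(data, dict):
        print("dict payload -> GraphQL vulnerabilityAlerts flow")
    elif isinstance(data, list):
        print("list payload -> code-scanning alert flow")
    else:
        print("any other type -> TypeError")
```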
