Skip to content

Commit 667c0e6

Browse files
refactor: revert ruff formatting changes, keep only unsaved_tags logic
The previous commit mixed substantive changes (tags= → unsaved_tags) with ruff auto-formatting. This restores the original code style while preserving the performance fix.
1 parent: 02e657a · commit: 667c0e6

5 files changed

Lines changed: 124 additions & 89 deletions

File tree

dojo/tools/anchore_grype/parser.py

Lines changed: 33 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -76,8 +76,7 @@ def get_findings(self, file, test):
7676
rel_epss = related_vulnerability.get("epss")
7777
rel_vuln_id = related_vulnerability.get("id")
7878
vulnerability_ids = self.get_vulnerability_ids(
79-
vuln_id,
80-
related_vulnerabilities,
79+
vuln_id, related_vulnerabilities,
8180
)
8281

8382
matches = item["matchDetails"]
@@ -88,25 +87,37 @@ def get_findings(self, file, test):
8887
artifact_purl = artifact.get("purl")
8988
artifact_location = artifact.get("locations")
9089
file_path = None
91-
if artifact_location and len(artifact_location) > 0 and artifact_location[0].get("path"):
90+
if (
91+
artifact_location
92+
and len(artifact_location) > 0
93+
and artifact_location[0].get("path")
94+
):
9295
file_path = artifact_location[0].get("path")
9396

9497
finding_title = f"{vuln_id} in {artifact_name}:{artifact_version}"
9598

9699
finding_tags = None
97100
finding_description = ""
98101
if vuln_namespace:
99-
finding_description += f"**Vulnerability Namespace:** {vuln_namespace}"
102+
finding_description += (
103+
f"**Vulnerability Namespace:** {vuln_namespace}"
104+
)
100105
if vuln_description:
101-
finding_description += f"\n**Vulnerability Description:** {vuln_description}"
106+
finding_description += (
107+
f"\n**Vulnerability Description:** {vuln_description}"
108+
)
102109
if rel_description and rel_description != vuln_description:
103110
finding_description += f"\n**Related Vulnerability Description:** {rel_description}"
104111
if matches:
105112
if isinstance(item["matchDetails"], dict):
106-
finding_description += f"\n**Matcher:** {matches['matcher']}"
113+
finding_description += (
114+
f"\n**Matcher:** {matches['matcher']}"
115+
)
107116
finding_tags = [matches["matcher"].replace("-matcher", "")]
108117
elif len(matches) == 1:
109-
finding_description += f"\n**Matcher:** {matches[0]['matcher']}"
118+
finding_description += (
119+
f"\n**Matcher:** {matches[0]['matcher']}"
120+
)
110121
finding_tags = [
111122
matches[0]["matcher"].replace("-matcher", ""),
112123
]
@@ -137,22 +148,30 @@ def get_findings(self, file, test):
137148

138149
finding_references = ""
139150
if vuln_datasource:
140-
finding_references += f"**Vulnerability Datasource:** {vuln_datasource}\n"
151+
finding_references += (
152+
f"**Vulnerability Datasource:** {vuln_datasource}\n"
153+
)
141154
if vuln_urls:
142155
if len(vuln_urls) == 1:
143156
if vuln_urls[0] != vuln_datasource:
144-
finding_references += f"**Vulnerability URL:** {vuln_urls[0]}\n"
157+
finding_references += (
158+
f"**Vulnerability URL:** {vuln_urls[0]}\n"
159+
)
145160
else:
146161
finding_references += "**Vulnerability URLs:**\n"
147162
for url in vuln_urls:
148163
if url != vuln_datasource:
149164
finding_references += f"- {url}\n"
150165
if rel_datasource:
151-
finding_references += f"**Related Vulnerability Datasource:** {rel_datasource}\n"
166+
finding_references += (
167+
f"**Related Vulnerability Datasource:** {rel_datasource}\n"
168+
)
152169
if rel_urls:
153170
if len(rel_urls) == 1:
154171
if rel_urls[0] != vuln_datasource:
155-
finding_references += f"**Related Vulnerability URL:** {rel_urls[0]}\n"
172+
finding_references += (
173+
f"**Related Vulnerability URL:** {rel_urls[0]}\n"
174+
)
156175
else:
157176
finding_references += "**Related Vulnerability URLs:**\n"
158177
for url in rel_urls:
@@ -209,7 +228,6 @@ def get_findings(self, file, test):
209228

210229
dupes[dupe_key].unsaved_tags = finding_tags
211230
dupes[dupe_key].unsaved_vulnerability_ids = vulnerability_ids
212-
213231
if settings.V3_FEATURE_LOCATIONS and artifact_purl:
214232
dupes[dupe_key].unsaved_locations.append(
215233
LocationData.dependency(purl=artifact_purl, file_path=file_path),
@@ -228,8 +246,7 @@ def get_cvss(self, cvss):
228246
vector = cvss_item["vector"]
229247
cvss_objects = cvss_parser.parse_cvss_from_text(vector)
230248
if len(cvss_objects) > 0 and isinstance(
231-
cvss_objects[0],
232-
CVSS3,
249+
cvss_objects[0], CVSS3,
233250
):
234251
return vector
235252
return None
@@ -259,11 +276,8 @@ def get_vulnerability_ids(self, vuln_id, related_vulnerabilities):
259276
if vuln_id:
260277
vulnerability_ids.append(vuln_id)
261278
if related_vulnerabilities:
262-
vulnerability_ids.extend(
263-
related_vulnerability_id
264-
for related_vulnerability in related_vulnerabilities
265-
if (related_vulnerability_id := related_vulnerability.get("id"))
266-
)
279+
vulnerability_ids.extend(related_vulnerability_id for related_vulnerability in related_vulnerabilities
280+
if (related_vulnerability_id := related_vulnerability.get("id")))
267281
if vulnerability_ids:
268282
return vulnerability_ids
269283
return None

dojo/tools/cargo_audit/parser.py

Lines changed: 15 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -80,13 +80,24 @@ def get_findings(self, filename, test):
8080
vuln_id = advisory.get("id")
8181
vulnerability_ids = [advisory.get("id")]
8282
categories = f"**Categories:** {', '.join(advisory['categories'])}" if "categories" in advisory else ""
83-
description = categories + f"\n**Description:** `{advisory.get('description')}`"
83+
description = (
84+
categories
85+
+ f"\n**Description:** `{advisory.get('description')}`"
86+
)
8487

85-
if item["affected"] is not None and "functions" in item["affected"]:
88+
if (
89+
item["affected"] is not None
90+
and "functions" in item["affected"]
91+
):
8692
affected_func = [
87-
f"{func}: {', '.join(versions)}" for func, versions in item["affected"]["functions"].items()
93+
f'{func}: {", ".join(versions)}'
94+
for func, versions in item["affected"][
95+
"functions"
96+
].items()
8897
]
89-
description += f"\n**Affected functions**: {', '.join(affected_func)}"
98+
description += (
99+
f"\n**Affected functions**: {', '.join(affected_func)}"
100+
)
90101

91102
references = f"{advisory.get('url')}\n" + "\n".join(
92103
advisory["references"],

dojo/tools/dependency_check/parser.py

Lines changed: 53 additions & 51 deletions
Original file line numberDiff line numberDiff line change
@@ -87,10 +87,7 @@ def add_finding(self, finding, dupes):
8787
dupes[key] = finding
8888

8989
def get_filename_and_path_from_dependency(
90-
self,
91-
dependency,
92-
related_dependency,
93-
namespace,
90+
self, dependency, related_dependency, namespace,
9491
):
9592
if related_dependency is None:
9693
return dependency.findtext(
@@ -107,10 +104,7 @@ def get_filename_and_path_from_dependency(
107104
return None, None
108105

109106
def get_component_name_and_version_from_dependency(
110-
self,
111-
dependency,
112-
related_dependency,
113-
namespace,
107+
self, dependency, related_dependency, namespace,
114108
):
115109
identifiers_node = dependency.find(namespace + "identifiers")
116110
if identifiers_node is not None:
@@ -122,13 +116,20 @@ def get_component_name_and_version_from_dependency(
122116
purl_parts = purl.to_dict()
123117
component_name = (
124118
purl_parts["namespace"] + ":"
125-
if purl_parts["namespace"] and len(purl_parts["namespace"]) > 0
119+
if purl_parts["namespace"]
120+
and len(purl_parts["namespace"]) > 0
121+
else ""
122+
)
123+
component_name += (
124+
purl_parts["name"]
125+
if purl_parts["name"] and len(purl_parts["name"]) > 0
126126
else ""
127127
)
128-
component_name += purl_parts["name"] if purl_parts["name"] and len(purl_parts["name"]) > 0 else ""
129128
component_name = component_name or None
130129
component_version = (
131-
purl_parts["version"] if purl_parts["version"] and len(purl_parts["version"]) > 0 else ""
130+
purl_parts["version"]
131+
if purl_parts["version"] and len(purl_parts["version"]) > 0
132+
else ""
132133
)
133134
return component_name, component_version, pck_id
134135

@@ -148,10 +149,20 @@ def get_component_name_and_version_from_dependency(
148149
if cpe_node:
149150
cpe_id = cpe_node.findtext(f"{namespace}name")
150151
cpe = CPE(cpe_id)
151-
component_name = cpe.get_vendor()[0] + ":" if len(cpe.get_vendor()) > 0 else ""
152-
component_name += cpe.get_product()[0] if len(cpe.get_product()) > 0 else ""
152+
component_name = (
153+
cpe.get_vendor()[0] + ":"
154+
if len(cpe.get_vendor()) > 0
155+
else ""
156+
)
157+
component_name += (
158+
cpe.get_product()[0] if len(cpe.get_product()) > 0 else ""
159+
)
153160
component_name = component_name or None
154-
component_version = cpe.get_version()[0] if len(cpe.get_version()) > 0 else None
161+
component_version = (
162+
cpe.get_version()[0]
163+
if len(cpe.get_version()) > 0
164+
else None
165+
)
155166
return component_name, component_version, None
156167

157168
maven_node = identifiers_node.find(
@@ -240,8 +251,7 @@ def get_severity_and_cvss_meta(self, vulnerability, namespace) -> dict:
240251
if severity:
241252
if severity.strip().lower() not in self.SEVERITY_MAPPING:
242253
logger.warning(
243-
"Warning: Unknow severity value detected '%s'. Bypass to 'Medium' value",
244-
severity,
254+
"Warning: Unknow severity value detected '%s'. Bypass to 'Medium' value", severity,
245255
)
246256
severity = "Medium"
247257
else:
@@ -256,20 +266,13 @@ def get_severity_and_cvss_meta(self, vulnerability, namespace) -> dict:
256266
}
257267

258268
def get_finding_from_vulnerability(
259-
self,
260-
dependency,
261-
related_dependency,
262-
vulnerability,
263-
test,
264-
namespace,
269+
self, dependency, related_dependency, vulnerability, test, namespace,
265270
):
266271
(
267272
dependency_filename,
268273
dependency_filepath,
269274
) = self.get_filename_and_path_from_dependency(
270-
dependency,
271-
related_dependency,
272-
namespace,
275+
dependency, related_dependency, namespace,
273276
)
274277
# logger.debug('dependency_filename: %s', dependency_filename)
275278

@@ -315,17 +318,13 @@ def get_finding_from_vulnerability(
315318
component_version,
316319
component_purl,
317320
) = self.get_component_name_and_version_from_dependency(
318-
dependency,
319-
related_dependency,
320-
namespace,
321+
dependency, related_dependency, namespace,
321322
)
322323

323324
stripped_name = name
324325
# startswith CVE-XXX-YYY
325326
stripped_name = re.sub(
326-
r"^CVE-\d{4}-\d{4,7}",
327-
"",
328-
stripped_name,
327+
r"^CVE-\d{4}-\d{4,7}", "", stripped_name,
329328
).strip()
330329
# startswith CWE-XXX:
331330
stripped_name = re.sub(r"^CWE-\d+\:", "", stripped_name).strip()
@@ -334,8 +333,7 @@ def get_finding_from_vulnerability(
334333

335334
if component_name is None:
336335
logger.warning(
337-
"component_name was None for File: %s, using dependency file name instead.",
338-
dependency_filename,
336+
"component_name was None for File: %s, using dependency file name instead.", dependency_filename,
339337
)
340338
component_name = dependency_filename
341339

@@ -354,9 +352,15 @@ def get_finding_from_vulnerability(
354352
ref_url = reference_node.findtext(f"{namespace}url")
355353
ref_name = reference_node.findtext(f"{namespace}name")
356354
if ref_url == ref_name:
357-
reference_detail += f"**Source:** {ref_source}\n**URL:** {ref_url}\n\n"
355+
reference_detail += (
356+
f"**Source:** {ref_source}\n**URL:** {ref_url}\n\n"
357+
)
358358
else:
359-
reference_detail += f"**Source:** {ref_source}\n**URL:** {ref_url}\n**Name:** {ref_name}\n\n"
359+
reference_detail += (
360+
f"**Source:** {ref_source}\n"
361+
f"**URL:** {ref_url}\n"
362+
f"**Name:** {ref_name}\n\n"
363+
)
360364

361365
if related_dependency is not None:
362366
tags.append("related")
@@ -366,18 +370,14 @@ def get_finding_from_vulnerability(
366370
notes = "Document on why we are suppressing this vulnerability is missing!"
367371
tags.append("no_suppression_document")
368372
mitigation = f"**This vulnerability is mitigated and/or suppressed:** {notes}\n"
369-
mitigation += (
370-
f"Update {component_name}:{component_version} to at least the version recommended in the description"
371-
)
373+
mitigation += f"Update {component_name}:{component_version} to at least the version recommended in the description"
372374
mitigated = datetime.datetime.now(datetime.UTC)
373375
is_Mitigated = True
374376
active = False
375377
tags.append("suppressed")
376378

377379
else:
378-
mitigation = (
379-
f"Update {component_name}:{component_version} to at least the version recommended in the description"
380-
)
380+
mitigation = f"Update {component_name}:{component_version} to at least the version recommended in the description"
381381
description += "\n**Filepath:** " + str(dependency_filepath)
382382
active = True
383383

@@ -467,15 +467,19 @@ def get_findings(self, filename, test):
467467
namespace + "relatedDependencies",
468468
)
469469
if relatedDependencies is not None:
470-
for relatedDependency in relatedDependencies.findall(
470+
for (
471+
relatedDependency
472+
) in relatedDependencies.findall(
471473
namespace + "relatedDependency",
472474
):
473-
finding = self.get_finding_from_vulnerability(
474-
dependency,
475-
relatedDependency,
476-
vulnerability,
477-
test,
478-
namespace,
475+
finding = (
476+
self.get_finding_from_vulnerability(
477+
dependency,
478+
relatedDependency,
479+
vulnerability,
480+
test,
481+
namespace,
482+
)
479483
)
480484
if finding: # could be None
481485
if scan_date:
@@ -499,9 +503,7 @@ def get_findings(self, filename, test):
499503
elif settings.V3_FEATURE_LOCATIONS:
500504
# Collect product-level dependency locations
501505
_, _, component_purl = self.get_component_name_and_version_from_dependency(
502-
dependency,
503-
None,
504-
namespace,
506+
dependency, None, namespace,
505507
)
506508
if component_purl:
507509
test.unsaved_metadata.append(

0 commit comments

Comments (0)