Reimport: Special statuses should be respected from reports #12249

Merged
9 changes: 9 additions & 0 deletions dojo/importers/default_reimporter.py
@@ -264,6 +264,15 @@ def close_old_findings(
# Determine if pushing to jira or if the finding groups are enabled
mitigated_findings = []
for finding in findings:
# Pick up any special-status changes (false positive, risk accepted,
# out of scope) that may have been applied earlier in the reimport process.
# For example, the report may mark a finding as a false positive and
# reimport writes that change to the database, but the findings handled
# here were loaded before the reimport ran. Without refreshing them from
# the database first, those updates would be silently discarded.
finding.refresh_from_db(fields=["false_p", "risk_accepted", "out_of_scope"])
# Ensure the finding is not already closed
if not finding.mitigated or not finding.is_mitigated:
logger.debug("mitigating finding: %i:%s", finding.id, finding)
self.mitigate_finding(
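
A rough standalone sketch of the staleness problem this refresh addresses (`db_row` and `FakeFinding` are hypothetical stand-ins for the database row and the Django `Finding` model, not DefectDojo code):

```python
# Hypothetical stand-ins: a dict plays the role of the database row and a
# tiny class plays the role of the Django Finding model.
db_row = {"false_p": False, "risk_accepted": False, "out_of_scope": False}


class FakeFinding:
    def __init__(self):
        # Loaded once, before reimport runs
        self.false_p = db_row["false_p"]
        self.risk_accepted = db_row["risk_accepted"]
        self.out_of_scope = db_row["out_of_scope"]

    def refresh_from_db(self, fields):
        # Re-read only the requested columns, mirroring Django's
        # Model.refresh_from_db(fields=[...])
        for field in fields:
            setattr(self, field, db_row[field])


finding = FakeFinding()

# Reimport marks the finding as a false positive in the database,
# but the in-memory instance loaded earlier does not see the change.
db_row["false_p"] = True
assert finding.false_p is False

# Refreshing the special-status fields brings the instance up to date,
# so the close-old-findings logic operates on the current state.
finding.refresh_from_db(fields=["false_p", "risk_accepted", "out_of_scope"])
assert finding.false_p is True
```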
34 changes: 25 additions & 9 deletions dojo/tools/checkmarx_one/parser.py
@@ -71,7 +71,6 @@ def parse_iac_vulnerabilities(
"description": (
f"{query.get('description')}\n\n"
f"**Category**: {query.get('category')}\n"),
"verified": query.get("state") != "TO_VERIFY",
"test": test,
}
# Iterate over the individual issues
@@ -81,6 +80,8 @@
date = self._parse_date(instance.get("firstDetectionDate"))
else:
date = self._parse_date(instance.get("lastDetectionDate"))
instance_details = self.determine_state(instance)
instance_details.update(base_finding_details)
# Create the finding object
finding = Finding(
severity=instance.get("severity").title(),
Expand All @@ -90,7 +91,7 @@ def parse_iac_vulnerabilities(
f"**Actual Value**: {instance.get('actualValue')}\n"
f"**Expected Value**: {instance.get('expectedValue')}\n"
),
**base_finding_details,
**instance_details,
)
# Add some details to the description
finding.description += (
@@ -174,14 +175,15 @@ def get_node_snippet(nodes: list) -> str:
date = self._parse_date(instance.get("firstFoundDate"))
else:
date = self._parse_date(instance.get("foundDate"))
instance_details = self.determine_state(instance)
instance_details.update(base_finding_details)
# Create the finding object
finding = Finding(
severity=instance.get("severity").title(),
date=date,
file_path=instance.get("destinationFileName"),
line=instance.get("destinationLine"),
verified=instance.get("state") != "TO_VERIFY",
**base_finding_details,
**instance_details,
)
# Add some details to the description
if node_snippet := get_node_snippet(instance.get("nodes", [])):
@@ -219,11 +221,9 @@ def parse_vulnerabilities(
+ "**uri**: " + locations_uri + "\n"
+ "**startLine**: " + str(locations_startLine) + "\n"
+ "**endLine**: " + str(locations_endLine) + "\n",
false_p=False,
duplicate=False,
out_of_scope=False,
static_finding=True,
dynamic_finding=False,
**self.determine_state(result),
)
findings.append(finding)
return findings
@@ -273,6 +273,7 @@ def get_results_sast(
test=test,
static_finding=True,
unique_id_from_tool=unique_id_from_tool,
**self.determine_state(vulnerability),
)

def get_results_kics(
@@ -290,11 +291,11 @@ def get_results_kics(
title=description,
description=description,
severity=vulnerability.get("severity").title(),
verified=vulnerability.get("state") != "TO_VERIFY",
file_path=file_path,
test=test,
static_finding=True,
unique_id_from_tool=unique_id_from_tool,
**self.determine_state(vulnerability),
)

def get_results_sca(
Expand All @@ -311,10 +312,10 @@ def get_results_sca(
title=description,
description=description,
severity=vulnerability.get("severity").title(),
verified=vulnerability.get("state") != "TO_VERIFY",
test=test,
static_finding=True,
unique_id_from_tool=unique_id_from_tool,
**self.determine_state(vulnerability),
)
if (cveId := vulnerability.get("cveId")) is not None:
finding.unsaved_vulnerability_ids = [cveId]
@@ -332,3 +333,18 @@ def get_findings(self, file, test):
findings = self.parse_results(test, results)

return findings

def determine_state(self, data: dict) -> dict:
"""
Determine the state of the findings as set by Checkmarx One docs
https://docs.checkmarx.com/en/34965-68516-managing--triaging--vulnerabilities0.html#UUID-bc2397a3-1614-48bc-ff2f-1bc342071c5a_UUID-ad4991d6-161f-f76e-7d04-970f158eff9b
"""
state = data.get("state")
return {
"active": state in {"TO_VERIFY", "PROPOSED_NOT_EXPLOITABLE", "CONFIRMED", "URGENT"},
"verified": state in {"NOT_EXPLOITABLE", "CONFIRMED", "URGENT"},
"false_p": state == "NOT_EXPLOITABLE",
# These are not managed by Checkmarx One, but it is nice to set them explicitly
"duplicate": False,
"out_of_scope": False,
}
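
A quick standalone check of the state mapping above; the helper simply reproduces the dictionary from `determine_state` so the snippet runs on its own, and the expected values follow directly from that mapping:

```python
def determine_state(data: dict) -> dict:
    # Same mapping as in the parser, copied here so the snippet is self-contained
    state = data.get("state")
    return {
        "active": state in {"TO_VERIFY", "PROPOSED_NOT_EXPLOITABLE", "CONFIRMED", "URGENT"},
        "verified": state in {"NOT_EXPLOITABLE", "CONFIRMED", "URGENT"},
        "false_p": state == "NOT_EXPLOITABLE",
        "duplicate": False,
        "out_of_scope": False,
    }


# NOT_EXPLOITABLE: verified and flagged as a false positive, no longer active
assert determine_state({"state": "NOT_EXPLOITABLE"}) == {
    "active": False, "verified": True, "false_p": True,
    "duplicate": False, "out_of_scope": False,
}

# TO_VERIFY: still open, not yet triaged
assert determine_state({"state": "TO_VERIFY"})["active"] is True
assert determine_state({"state": "TO_VERIFY"})["verified"] is False
```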
12 changes: 9 additions & 3 deletions unittests/dojo_test_case.py
@@ -513,14 +513,18 @@ def import_scan_with_params(self, filename, scan_type="ZAP Scan", engagement=1,
with (get_unit_tests_path() / filename).open(encoding="utf-8") as testfile:
payload = {
"minimum_severity": minimum_severity,
"active": active,
"verified": verified,
"scan_type": scan_type,
"file": testfile,
"version": "1.0.1",
"close_old_findings": close_old_findings,
}

if active is not None:
payload["active"] = active

if verified is not None:
payload["verified"] = verified

if engagement:
payload["engagement"] = engagement

@@ -669,14 +673,16 @@ def patch_finding_api(self, finding_id, finding_details, push_to_jira=None):
def assert_finding_count_json(self, count, findings_content_json):
self.assertEqual(findings_content_json["count"], count)

def get_test_findings_api(self, test_id, active=None, verified=None, is_mitigated=None, component_name=None, component_version=None, severity=None):
def get_test_findings_api(self, test_id, active=None, verified=None, is_mitigated=None, false_p=None, component_name=None, component_version=None, severity=None):
payload = {"test": test_id}
if active is not None:
payload["active"] = active
if verified is not None:
payload["verified"] = verified
if is_mitigated is not None:
payload["is_mitigated"] = is_mitigated
if false_p is not None:
payload["false_p"] = false_p
if component_name is not None:
payload["component_name"] = component_name
if severity is not None: