diff --git a/dojo/api_v2/serializers.py b/dojo/api_v2/serializers.py
index 77e937da85b..fefde7e24ef 100644
--- a/dojo/api_v2/serializers.py
+++ b/dojo/api_v2/serializers.py
@@ -219,7 +219,7 @@ def to_internal_value(self, data):
         except ValueError:
             self.fail("invalid_json")

-        logger.debug(f"data as json: {data}")
+        logger.debug("data as json: %s", data)

         if not isinstance(data, list):
             self.fail("not_a_list", input_type=type(data).__name__)
@@ -238,7 +238,7 @@ def to_internal_value(self, data):
                 tag_validator(sub, exception_class=RestFrameworkValidationError)
             data_safe.extend(substrings)

-        logger.debug(f"result after rendering tags: {data_safe}")
+        logger.debug("result after rendering tags: %s", data_safe)

         return data_safe

     def to_representation(self, value):
@@ -1863,7 +1863,7 @@ class Meta:

     # Overriding this to push add Push to JIRA functionality
     def create(self, validated_data):
-        logger.debug(f"Creating finding with validated data: {validated_data}")
+        logger.debug("Creating finding with validated data: %s", validated_data)
         push_to_jira = validated_data.pop("push_to_jira", False)
         notes = validated_data.pop("notes", None)
         found_by = validated_data.pop("found_by", None)
diff --git a/dojo/api_v2/views.py b/dojo/api_v2/views.py
index f8cda3e8413..63bbd480f1f 100644
--- a/dojo/api_v2/views.py
+++ b/dojo/api_v2/views.py
@@ -2648,7 +2648,7 @@ def perform_create(self, serializer):
         jira_driver = test or (engagement or (product or None))
         if jira_project := (jira_helper.get_jira_project(jira_driver) if jira_driver else None):
             push_to_jira = push_to_jira or jira_project.push_all_issues
-        logger.debug(f"push_to_jira: {push_to_jira}")
+        logger.debug("push_to_jira: %s", push_to_jira)
         serializer.save(push_to_jira=push_to_jira)
diff --git a/dojo/finding/views.py b/dojo/finding/views.py
index fad9c2f43f8..fc7fe43284f 100644
--- a/dojo/finding/views.py
+++ b/dojo/finding/views.py
@@ -486,7 +486,7 @@ def get_request_response(self, finding: Finding):
             burp_request = base64.b64decode(request_response.burpRequestBase64)
             burp_response = base64.b64decode(request_response.burpResponseBase64)
         except Exception as e:
-            logger.debug(f"unsuspected error: {e}")
+            logger.debug("unsuspected error: %s", e)

         return {
             "burp_request": burp_request,
@@ -1597,7 +1597,7 @@ def request_finding_review(request, fid):
         reviewers = Dojo_User.objects.filter(id__in=form.cleaned_data["reviewers"])
         reviewers_string = ", ".join([f"{user} ({user.id})" for user in reviewers])
         reviewers_usernames = [user.username for user in reviewers]
-        logger.debug(f"Asking {reviewers_string} for review")
+        logger.debug("Asking %s for review", reviewers_string)

         create_notification(
             event="review_requested",  # TODO: - if 'review_requested' functionality will be supported by API as well, 'create_notification' needs to be migrated to place where it will be able to cover actions from both interfaces
diff --git a/dojo/importers/endpoint_manager.py b/dojo/importers/endpoint_manager.py
index 7f408c909dc..db8c637ad83 100644
--- a/dojo/importers/endpoint_manager.py
+++ b/dojo/importers/endpoint_manager.py
@@ -111,7 +111,7 @@ def clean_unsaved_endpoints(
             try:
                 endpoint.clean()
             except ValidationError as e:
-                logger.warning(f"DefectDojo is storing broken endpoint because cleaning wasn't successful: {e}")
+                logger.warning("DefectDojo is storing broken endpoint because cleaning wasn't successful: %s", e)

     def chunk_endpoints_and_reactivate(
         self,
diff --git a/dojo/jira_link/helper.py b/dojo/jira_link/helper.py
index 00385e733e4..77977469240 100644
--- a/dojo/jira_link/helper.py
+++ b/dojo/jira_link/helper.py
@@ -204,7 +204,7 @@ def can_be_pushed_to_jira(obj, form=None):
             return False, f"Finding below the minimum JIRA severity threshold ({System_Settings.objects.get().jira_minimum_severity}).", "error_below_minimum_threshold"
     elif isinstance(obj, Finding_Group):
         finding_group_status = _safely_get_obj_status_for_jira(obj)
-        logger.error(f"Finding group status: {finding_group_status}")
+        logger.error("Finding group status: %s", finding_group_status)
         if "Empty" in finding_group_status:
             return False, f"{to_str_typed(obj)} cannot be pushed to jira as it contains no findings above minimum treshold.", "error_empty"
diff --git a/dojo/management/commands/import_all_unittest_scans.py b/dojo/management/commands/import_all_unittest_scans.py
index 04a4c84f156..cc4dd266250 100644
--- a/dojo/management/commands/import_all_unittest_scans.py
+++ b/dojo/management/commands/import_all_unittest_scans.py
@@ -120,7 +120,7 @@ def import_scan_with_params(self, filename, scan_type="ZAP Scan", engagement=1,
         return self.import_scan(payload, expected_http_status_code)

     def import_all_unittest_scans(self, product_name_prefix=None, tests_per_engagement=10, engagements_per_product=50, products_per_product_type=15, *, include_very_big_scans=False, **kwargs):
-        logger.info(f"product_name_prefix: {product_name_prefix}, tests_per_engagement: {tests_per_engagement}, engagements_per_product: {engagements_per_product}, products_per_product_type: {products_per_product_type}")
+        logger.info("product_name_prefix: %s, tests_per_engagement: %s, engagements_per_product: %s, products_per_product_type: %s", product_name_prefix, tests_per_engagement, engagements_per_product, products_per_product_type)

         product_type_prefix = "Sample scans " + datetime.now().strftime("%Y-%m-%d %H:%M:%S")
         product_type_index = 1
@@ -159,7 +159,7 @@ def import_all_unittest_scans(self, product_name_prefix=None, tests_per_engageme
             for attribute_name in dir(module):
                 attribute = getattr(module, attribute_name)
                 if isclass(attribute) and attribute_name.lower() == module_name.replace("_", "") + "parser":
-                    logger.debug(f"Loading {module_name} parser")
+                    logger.debug("Loading %s parser", module_name)
                     scan_dir = Path("unittests") / "scans" / module_name
                     for scan_file in scan_dir.glob("*.json"):
                         if include_very_big_scans or scan_file.name != "very_many_vulns.json":  # jfrog_xray file is huge and takes too long to import
@@ -183,12 +183,12 @@ def import_all_unittest_scans(self, product_name_prefix=None, tests_per_engageme
                             error_messages[module_name + "/" + scan_file.name] = result.get("message", str(e))

             except:
-                logger.exception(f"failed to load {module_name}")
+                logger.exception("failed to load %s", module_name)
                 raise

-        logger.error(f"Error count: {error_count}")
+        logger.error("Error count: %s", error_count)
         for scan, message in error_messages.items():
-            logger.error(f"Error importing scan {scan}: {message}")
+            logger.error("Error importing scan %s: %s", scan, message)

     def handle(self, *args, **options):
         logger.info("EXPERIMENTAL: This command may be changed/deprecated/removed without prior notice.")
diff --git a/dojo/management/commands/import_github_languages.py b/dojo/management/commands/import_github_languages.py
index 9e1c45ffb32..89ddf80f7b8 100644
--- a/dojo/management/commands/import_github_languages.py
+++ b/dojo/management/commands/import_github_languages.py
@@ -43,7 +43,7 @@ def handle(self, *args, **options):
             try:
                 language_type, created = Language_Type.objects.get_or_create(language=name)
             except Language_Type.MultipleObjectsReturned:
-                logger.warning(f"Language_Type {name} exists multiple times")
+                logger.warning("Language_Type %s exists multiple times", name)
                 continue

             if created:
@@ -52,4 +52,4 @@ def handle(self, *args, **options):
             language_type.color = element.get("color", 0)
             language_type.save()

-        logger.info(f"Finished importing languages from GitHub, added {new_language_types} Language_Types")
+        logger.info("Finished importing languages from GitHub, added %s Language_Types", new_language_types)
diff --git a/dojo/models.py b/dojo/models.py
index 9d3a238d9ca..792ddd19639 100644
--- a/dojo/models.py
+++ b/dojo/models.py
@@ -1866,7 +1866,7 @@ def clean(self):
                 action_string = "Postgres does not accept NULL character. Attempting to replace with %00..."
                 for remove_str in null_char_list:
                     self.path = self.path.replace(remove_str, "%00")
-                logger.error(f'Path "{old_value}" has invalid format - It contains the NULL character. The following action was taken: {action_string}')
+                logger.error('Path "%s" has invalid format - It contains the NULL character. The following action was taken: %s', old_value, action_string)
             if self.path == "":
                 self.path = None
@@ -1879,7 +1879,7 @@ def clean(self):
                 action_string = "Postgres does not accept NULL character. Attempting to replace with %00..."
                 for remove_str in null_char_list:
                     self.query = self.query.replace(remove_str, "%00")
-                logger.error(f'Query "{old_value}" has invalid format - It contains the NULL character. The following action was taken: {action_string}')
+                logger.error('Query "%s" has invalid format - It contains the NULL character. The following action was taken: %s', old_value, action_string)
             if self.query == "":
                 self.query = None
@@ -1892,7 +1892,7 @@ def clean(self):
                 action_string = "Postgres does not accept NULL character. Attempting to replace with %00..."
                 for remove_str in null_char_list:
                     self.fragment = self.fragment.replace(remove_str, "%00")
-                logger.error(f'Fragment "{old_value}" has invalid format - It contains the NULL character. The following action was taken: {action_string}')
+                logger.error('Fragment "%s" has invalid format - It contains the NULL character. The following action was taken: %s', old_value, action_string)
             if self.fragment == "":
                 self.fragment = None
diff --git a/dojo/notifications/helper.py b/dojo/notifications/helper.py
index 9c16d4b2cee..99edd43c593 100644
--- a/dojo/notifications/helper.py
+++ b/dojo/notifications/helper.py
@@ -234,7 +234,7 @@ def send_slack_notification(
         elif self.system_settings.slack_channel is not None:
             channel = self.system_settings.slack_channel
             logger.info(
-                f"Sending system notification to system channel {channel}.",
+                "Sending system notification to system channel %s.", channel,
             )
             self._post_slack_message(event, user, channel, **kwargs)
         else:
@@ -272,11 +272,11 @@ def _get_slack_user_id(self, user_email: str) -> str:
                 if user_email == user["user"]["profile"]["email"]:
                     if "id" in user["user"]:
                         user_id = user["user"]["id"]
-                        logger.debug(f"Slack user ID is {user_id}")
+                        logger.debug("Slack user ID is %s", user_id)
                         slack_user_is_found = True
                 else:
                     logger.warning(
-                        f"A user with email {user_email} could not be found in this Slack workspace.",
+                        "A user with email %s could not be found in this Slack workspace.", user_email,
                     )
             if not slack_user_is_found:
@@ -496,7 +496,7 @@ def _get_webhook_endpoints(
         if not endpoints.exists():
             if user:
                 logger.info(
-                    f"URLs for Webhooks not configured for user '{user}': skipping user notification",
+                    "URLs for Webhooks not configured for user '%s': skipping user notification", user,
                 )
             else:
                 logger.info(
diff --git a/dojo/pipeline.py b/dojo/pipeline.py
index cde01e0b0d9..e8bdd2512bf 100644
--- a/dojo/pipeline.py
+++ b/dojo/pipeline.py
@@ -99,7 +99,7 @@ def update_azure_groups(backend, uid, user=None, social=None, *args, **kwargs):
                         logger.debug("Skipping group " + group_name + " due to AZUREAD_TENANT_OAUTH2_GROUPS_FILTER " + settings.AZUREAD_TENANT_OAUTH2_GROUPS_FILTER)
                         continue
         except Exception as e:
-            logger.error(f"Could not call microsoft graph API or save groups to member: {e}")
+            logger.error("Could not call microsoft graph API or save groups to member: %s", e)
         if len(group_names) > 0:
             assign_user_to_groups(user, group_names, Dojo_Group.AZURE)
         if settings.AZUREAD_TENANT_OAUTH2_CLEANUP_GROUPS:
diff --git a/dojo/product/helpers.py b/dojo/product/helpers.py
index ff7ada2dbd9..aeadec0246d 100644
--- a/dojo/product/helpers.py
+++ b/dojo/product/helpers.py
@@ -18,16 +18,16 @@ def propagate_tags_on_product(product_id, *args, **kwargs):

 def propagate_tags_on_product_sync(product):
     # enagagements
-    logger.debug(f"Propagating tags from {product} to all engagements")
+    logger.debug("Propagating tags from %s to all engagements", product)
     propagate_tags_on_object_list(Engagement.objects.filter(product=product))
     # tests
-    logger.debug(f"Propagating tags from {product} to all tests")
+    logger.debug("Propagating tags from %s to all tests", product)
     propagate_tags_on_object_list(Test.objects.filter(engagement__product=product))
     # findings
-    logger.debug(f"Propagating tags from {product} to all findings")
+    logger.debug("Propagating tags from %s to all findings", product)
     propagate_tags_on_object_list(Finding.objects.filter(test__engagement__product=product))
     # endpoints
-    logger.debug(f"Propagating tags from {product} to all endpoints")
+    logger.debug("Propagating tags from %s to all endpoints", product)
     propagate_tags_on_object_list(Endpoint.objects.filter(product=product))
diff --git a/dojo/search/views.py b/dojo/search/views.py
index d075331bbc5..fbddc7aaa79 100644
--- a/dojo/search/views.py
+++ b/dojo/search/views.py
@@ -422,9 +422,9 @@ def parse_search_query(clean_query):
         else:
             keywords.append(vulnerability_id_fix(query_part))

-    logger.debug(f"query: {clean_query}")
-    logger.debug(f"operators: {operators}")
-    logger.debug(f"keywords: {keywords}")
+    logger.debug("query: %s", clean_query)
+    logger.debug("operators: %s", operators)
+    logger.debug("keywords: %s", keywords)

     return operators, keywords
diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py
index e8276c28c18..c10b364059f 100644
--- a/dojo/settings/settings.dist.py
+++ b/dojo/settings/settings.dist.py
@@ -1364,7 +1364,7 @@ def saml2_attrib_map_format(din):
             logger.info(f"Replacing {key} with value {value} (previously set to {HASHCODE_FIELDS_PER_SCANNER[key]}) from env var DD_HASHCODE_FIELDS_PER_SCANNER")
             HASHCODE_FIELDS_PER_SCANNER[key] = value
         if key not in HASHCODE_FIELDS_PER_SCANNER:
-            logger.info(f"Adding {key} with value {value} from env var DD_HASHCODE_FIELDS_PER_SCANNER")
+            logger.info("Adding %s with value %s from env var DD_HASHCODE_FIELDS_PER_SCANNER", key, value)
             HASHCODE_FIELDS_PER_SCANNER[key] = value
@@ -1618,7 +1618,7 @@ def saml2_attrib_map_format(din):
             logger.info(f"Replacing {key} with value {value} (previously set to {DEDUPLICATION_ALGORITHM_PER_PARSER[key]}) from env var DD_DEDUPLICATION_ALGORITHM_PER_PARSER")
             DEDUPLICATION_ALGORITHM_PER_PARSER[key] = value
         if key not in DEDUPLICATION_ALGORITHM_PER_PARSER:
-            logger.info(f"Adding {key} with value {value} from env var DD_DEDUPLICATION_ALGORITHM_PER_PARSER")
+            logger.info("Adding %s with value %s from env var DD_DEDUPLICATION_ALGORITHM_PER_PARSER", key, value)
             DEDUPLICATION_ALGORITHM_PER_PARSER[key] = value

 DUPE_DELETE_MAX_PER_RUN = env("DD_DUPE_DELETE_MAX_PER_RUN")
diff --git a/dojo/sla_config/helpers.py b/dojo/sla_config/helpers.py
index a7422869536..57633d0c2ec 100644
--- a/dojo/sla_config/helpers.py
+++ b/dojo/sla_config/helpers.py
@@ -21,7 +21,7 @@ def update_sla_expiration_dates_product_async(product, sla_config, *args, **kwar

 def update_sla_expiration_dates_sla_config_sync(sla_config, products, severities=None):
-    logger.info(f"Updating finding SLA expiration dates within the {sla_config} SLA configuration")
+    logger.info("Updating finding SLA expiration dates within the %s SLA configuration", sla_config)

     # update each finding that is within the SLA configuration that was saved
     findings = Finding.objects.filter(test__engagement__product__sla_configuration_id=sla_config.id)
     if products:
@@ -49,4 +49,4 @@ def update_sla_expiration_dates_sla_config_sync(sla_config, products, severities
     # reset the async updating flag to false for this sla config
     sla_config.async_updating = False
     super(SLA_Configuration, sla_config).save()

-    logger.info(f"DONE Updating finding SLA expiration dates within the {sla_config} SLA configuration")
+    logger.info("DONE Updating finding SLA expiration dates within the %s SLA configuration", sla_config)
diff --git a/dojo/tools/anchore_grype/parser.py b/dojo/tools/anchore_grype/parser.py
index 9dcd971b1fc..2cf89b87f44 100644
--- a/dojo/tools/anchore_grype/parser.py
+++ b/dojo/tools/anchore_grype/parser.py
@@ -30,9 +30,9 @@ def get_description_for_scan_types(self, scan_type):
         )

     def get_findings(self, file, test):
-        logger.debug(f"file: {file}")
+        logger.debug("file: %s", file)
         data = json.load(file)
-        logger.debug(f"data: {data}")
+        logger.debug("data: %s", data)
         dupes = {}
         for item in data.get("matches", []):
             vulnerability = item["vulnerability"]
@@ -223,11 +223,11 @@ def get_cvss(self, cvss):

     def get_epss_values(self, vuln_id, epss_list):
         if not isinstance(epss_list, list):
-            logger.debug(f"epss_list is not a list: {epss_list}")
+            logger.debug("epss_list is not a list: %s", epss_list)
             return None, None

         if isinstance(epss_list, list):
-            logger.debug(f"epss_list: {epss_list}")
+            logger.debug("epss_list: %s", epss_list)
             for epss_data in epss_list:
                 if epss_data.get("cve") != vuln_id:
                     continue
@@ -235,10 +235,10 @@ def get_epss_values(self, vuln_id, epss_list):
                     epss_score = float(epss_data.get("epss"))
                     epss_percentile = float(epss_data.get("percentile"))
                 except (TypeError, ValueError):
-                    logger.debug(f"epss_data is not a float: {epss_data}")
+                    logger.debug("epss_data is not a float: %s", epss_data)
                 else:
                     return epss_score, epss_percentile
-        logger.debug(f"epss not found for vuln_id: {vuln_id} in epss_list: {epss_list}")
+        logger.debug("epss not found for vuln_id: %s in epss_list: %s", vuln_id, epss_list)
         return None, None

     def get_vulnerability_ids(self, vuln_id, related_vulnerabilities):
diff --git a/dojo/tools/api_bugcrowd/importer.py b/dojo/tools/api_bugcrowd/importer.py
index d83d1edf3cd..ac14adda4a6 100644
--- a/dojo/tools/api_bugcrowd/importer.py
+++ b/dojo/tools/api_bugcrowd/importer.py
@@ -29,7 +29,7 @@ def get_findings(self, test):
         for page in submissions_paged:
             submissions += page
             counter += 1
-            logger.debug(f"{counter} Bugcrowd submissions pages fetched")
+            logger.debug("%s Bugcrowd submissions pages fetched", counter)

         return submissions, config
diff --git a/dojo/tools/api_bugcrowd/parser.py b/dojo/tools/api_bugcrowd/parser.py
index 9f98ac9d1b6..e61025e1736 100644
--- a/dojo/tools/api_bugcrowd/parser.py
+++ b/dojo/tools/api_bugcrowd/parser.py
@@ -155,7 +155,7 @@ def get_findings(self, file, test):
                     finding.unsaved_endpoints = [bug_endpoint]
                 except Exception as e:
                     logger.error(
-                        f"{bug_endpoint} bug url from bugcrowd failed to parse to endpoint, error= {e}",
+                        "%s bug url from bugcrowd failed to parse to endpoint, error= %s", bug_endpoint, e,
                     )
             except ValidationError:
                 logger.error(
diff --git a/dojo/tools/api_sonarqube/updater.py b/dojo/tools/api_sonarqube/updater.py
index c964760526f..18d547fea05 100644
--- a/dojo/tools/api_sonarqube/updater.py
+++ b/dojo/tools/api_sonarqube/updater.py
@@ -120,7 +120,7 @@ def update_sonarqube_finding(self, finding):
             return

         logger.debug(
-            f"Checking if finding '{finding}' needs to be updated in SonarQube",
+            "Checking if finding '%s' needs to be updated in SonarQube", finding,
         )
         client, _ = SonarQubeApiImporter.prepare_client(finding.test)
@@ -141,7 +141,7 @@ def update_sonarqube_finding(self, finding):
         current_status = issue.get("status")
         logger.debug(
-            f"--> SQ Current status: {current_status}. Current target status: {target_status}",
+            "--> SQ Current status: %s. Current target status: %s", current_status, target_status,
         )
         transitions = self.get_sonarqube_required_transitions_for(
@@ -149,7 +149,7 @@ def update_sonarqube_finding(self, finding):
         )
         if transitions:
             logger.info(
-                f"Updating finding '{finding}' in SonarQube",
+                "Updating finding '%s' in SonarQube", finding,
             )
             for transition in transitions:
diff --git a/dojo/tools/api_sonarqube/updater_from_source.py b/dojo/tools/api_sonarqube/updater_from_source.py
index a6a467861c3..d7028bd2ca9 100644
--- a/dojo/tools/api_sonarqube/updater_from_source.py
+++ b/dojo/tools/api_sonarqube/updater_from_source.py
@@ -42,14 +42,14 @@ def update(self, finding):
         current_status = issue.get("resolution") or issue.get("status")
         current_finding_status = self.get_sonarqube_status_for(finding)
         logger.debug(
-            f"--> SQ Current status: {current_status}. Finding status: {current_finding_status}",
+            "--> SQ Current status: %s. Finding status: %s", current_status, current_finding_status,
         )
         if (
             current_status not in {"OPEN", current_finding_status}
         ):
             logger.info(
-                f"Original SonarQube issue '{sonarqube_issue}' has changed. Updating DefectDojo finding '{finding}'...",
+                "Original SonarQube issue '%s' has changed. Updating DefectDojo finding '%s'...", sonarqube_issue, finding,
             )
             self.update_finding_status(finding, current_status)
diff --git a/dojo/tools/appcheck_web_application_scanner/parser.py b/dojo/tools/appcheck_web_application_scanner/parser.py
index 485d879e5be..40ece07bbbb 100644
--- a/dojo/tools/appcheck_web_application_scanner/parser.py
+++ b/dojo/tools/appcheck_web_application_scanner/parser.py
@@ -61,5 +61,5 @@ def get_findings(self, file, test):
                 finding.test = test
                 findings[dupe_key] = finding
         else:
-            LOGGER.warning(f"Skipping entry; could not find parser for scanning engine named: {scanning_engine}")
+            LOGGER.warning("Skipping entry; could not find parser for scanning engine named: %s", scanning_engine)

         return list(findings.values())
diff --git a/dojo/tools/burp/parser.py b/dojo/tools/burp/parser.py
index e45e844ec1c..d8cc9aaf969 100644
--- a/dojo/tools/burp/parser.py
+++ b/dojo/tools/burp/parser.py
@@ -299,7 +299,7 @@ def get_item(item_node, test):
     if len(cwes) > 1:
         # TODO: support more than one CWE
         logger.debug(
-            f"more than one CWE for a finding {cwes}. NOT supported by parser API",
+            "more than one CWE for a finding %s. NOT supported by parser API", cwes,
         )
     if len(cwes) > 0:
         finding.cwe = cwes[0]
diff --git a/dojo/tools/checkmarx_cxflow_sast/parser.py b/dojo/tools/checkmarx_cxflow_sast/parser.py
index 59c62b1afc7..f35dfca36a9 100644
--- a/dojo/tools/checkmarx_cxflow_sast/parser.py
+++ b/dojo/tools/checkmarx_cxflow_sast/parser.py
@@ -55,7 +55,7 @@ def get_findings(self, file, test):
         if file.name.strip().lower().endswith(".json"):
             return self._get_findings_json(file, test)
         # TODO: support CxXML format
-        logger.warning(f"Not supported file format ${file}")
+        logger.warning("Not supported file format $%s", file)
         return []

     def _get_findings_json(self, file, test):
@@ -98,7 +98,7 @@ def _get_findings_json(self, file, test):

             for detail_key in issue.get("details"):
                 if detail_key not in map_paths:
-                    logger.warning(f"{detail_key} not found in path, ignore")
+                    logger.warning("%s not found in path, ignore", detail_key)
                 else:
                     detail = map_paths[detail_key]
diff --git a/dojo/tools/cyberwatch_galeax/parser.py b/dojo/tools/cyberwatch_galeax/parser.py
index 02c0d6f85ea..7c8353917e2 100644
--- a/dojo/tools/cyberwatch_galeax/parser.py
+++ b/dojo/tools/cyberwatch_galeax/parser.py
@@ -22,11 +22,11 @@ def get_description_for_scan_types(self, scan_type):
         return "Import Cyberwatch Cve and Security Issue data in JSON format, you can get the json from this tool : https://github.com/Galeax/Cyberwatch-API-DefectDojo"

     def get_findings(self, filename, test):
-        logger.debug(f"Starting get_findings with filename: {filename}")
+        logger.debug("Starting get_findings with filename: %s", filename)
         try:
             file_content = self.read_file_content(filename)
         except Exception as e:
-            logger.error(f"Error processing file: {e}")
+            logger.error("Error processing file: %s", e)
             return []
         else:
             data = json.loads(file_content)
@@ -75,12 +75,12 @@ def collect_cve_data(self, json_data, cve_data):
         # Safely handle when "cwes" is null
         cwes = json_data.get("cwes") or {}
         if not isinstance(cwes, dict):
-            logger.error(f"Invalid cwes data: {cwes}")
+            logger.error("Invalid cwes data: %s", cwes)
             cwes = {}
         cwes_ids = cwes.get("cwe_id", [])
         if not isinstance(cwes_ids, list):
-            logger.error(f"Invalid cwe_id data: {cwes_ids}")
+            logger.error("Invalid cwe_id data: %s", cwes_ids)
             cwes_ids = []
         if cwes_ids:
             try:
@@ -139,7 +139,7 @@ def collect_cve_data(self, json_data, cve_data):

         servers = json_data.get("servers", [])
         if not isinstance(servers, list):
-            logger.error(f"servers is not a list: {servers}")
+            logger.error("servers is not a list: %s", servers)
             return

         server_lookup = {s.get("computer_name", ""): s for s in servers if isinstance(s, dict)}
@@ -327,7 +327,7 @@ def create_finding(
             try:
                 finding.epss_score = float(epss)
             except Exception:
-                logger.error(f"Error converting epss score to percentage: {epss}")
+                logger.error("Error converting epss score to percentage: %s", epss)
                 finding.epss_score = epss
             return finding
         finding.epss_score = epss
@@ -404,7 +404,7 @@ def process_security_issue(self, json_data, test):

         servers = json_data.get("servers", [])
         if not isinstance(servers, list):
-            logger.error(f"servers is not a list: {servers}")
+            logger.error("servers is not a list: %s", servers)
             return None

         unsaved_endpoints, unsaved_endpoint_status, active_status, mitigated_date = self.process_servers_for_security_issue(servers)
@@ -452,7 +452,7 @@ def process_servers_for_security_issue(self, servers):
         for server in servers:
             if not server or not isinstance(server, dict):
-                logger.error(f"Invalid server data: {server}")
+                logger.error("Invalid server data: %s", server)
                 continue

             computer_name = server.get("computer_name", "Unknown Hostname")
@@ -499,7 +499,7 @@ def parse_fixed_at(self, fixed_at_str):
         try:
             return datetime.strptime(fixed_at_str, "%Y-%m-%dT%H:%M:%S.%f%z")
         except ValueError as e:
-            logger.error(f'Error parsing fixed_at date "{fixed_at_str}": {e}')
+            logger.error('Error parsing fixed_at date "%s": %s', fixed_at_str, e)
             return datetime.now()

     def parse_datetime(self, dt_str):
@@ -508,7 +508,7 @@ def parse_datetime(self, dt_str):
         try:
             return datetime.strptime(dt_str, "%Y-%m-%dT%H:%M:%S.%f%z")
         except (ValueError, TypeError):
-            logger.error(f'Error parsing datetime "{dt_str}"')
+            logger.error('Error parsing datetime "%s"', dt_str)
             return datetime.now()

     def parse_cvss(self, cvss_v3_vector, json_data):
@@ -519,7 +519,7 @@ def parse_cvss(self, cvss_v3_vector, json_data):
             cvssv3_score = vectors[0].scores()[0]
             severity = vectors[0].severities()[0]
             return cvssv3, cvssv3_score, severity
-        logger.error(f"Invalid CVSS v3 vector: {cvss_v3_vector}")
+        logger.error("Invalid CVSS v3 vector: %s", cvss_v3_vector)
         severity = self.convert_severity(json_data.get("cve_level", "Info"))
         return None, None, severity
diff --git a/dojo/tools/cyclonedx/helpers.py b/dojo/tools/cyclonedx/helpers.py
index f9a7b67f80c..4ca188193d6 100644
--- a/dojo/tools/cyclonedx/helpers.py
+++ b/dojo/tools/cyclonedx/helpers.py
@@ -15,13 +15,13 @@ def _get_cvssv3(self, raw_vector):
         return CVSS3(raw_vector)
     except BaseException:
         LOGGER.exception(
-            f"error while parsing vector CVSS v3 {raw_vector}",
+            "error while parsing vector CVSS v3 %s", raw_vector,
         )
         return None

 def _get_component(self, components, reference):
     if reference not in components:
-        LOGGER.warning(f"reference:{reference} not found in the BOM")
+        LOGGER.warning("reference:%s not found in the BOM", reference)
         return (None, None)
     if "version" not in components[reference]:
         return (components[reference]["name"], None)
diff --git a/dojo/tools/cyclonedx/json_parser.py b/dojo/tools/cyclonedx/json_parser.py
index a289dae7d0c..a53b9dd799d 100644
--- a/dojo/tools/cyclonedx/json_parser.py
+++ b/dojo/tools/cyclonedx/json_parser.py
@@ -105,7 +105,7 @@ def _get_findings_json(self, file, test):
                 if cwes and len(cwes) > 1:
                     # TODO: support more than one CWE
                     LOGGER.debug(
-                        f"more than one CWE for a finding {cwes}. NOT supported by parser API",
+                        "more than one CWE for a finding %s. NOT supported by parser API", cwes,
                     )
                 if cwes and len(cwes) > 0:
                     finding.cwe = cwes[0]
diff --git a/dojo/tools/cyclonedx/xml_parser.py b/dojo/tools/cyclonedx/xml_parser.py
index 84decac9b77..54363b49217 100644
--- a/dojo/tools/cyclonedx/xml_parser.py
+++ b/dojo/tools/cyclonedx/xml_parser.py
@@ -156,7 +156,7 @@ def manage_vulnerability_legacy(
         if len(cwes) > 1:
             # TODO: support more than one CWE
             LOGGER.debug(
-                f"more than one CWE for a finding {cwes}. NOT supported by parser API",
+                "more than one CWE for a finding %s. NOT supported by parser API", cwes,
             )
         if len(cwes) > 0:
             finding.cwe = cwes[0]
@@ -266,7 +266,7 @@ def _manage_vulnerability_xml(
         if len(cwes) > 1:
             # TODO: support more than one CWE
             LOGGER.debug(
-                f"more than one CWE for a finding {cwes}. NOT supported by parser API",
+                "more than one CWE for a finding %s. NOT supported by parser API", cwes,
             )
         if len(cwes) > 0:
             finding.cwe = cwes[0]
diff --git a/dojo/tools/dependency_check/parser.py b/dojo/tools/dependency_check/parser.py
index 9de6b3c2f75..ec14ac8f196 100644
--- a/dojo/tools/dependency_check/parser.py
+++ b/dojo/tools/dependency_check/parser.py
@@ -248,7 +248,7 @@ def get_severity_and_cvss_meta(self, vulnerability, namespace) -> dict:
         if severity:
             if severity.strip().lower() not in self.SEVERITY_MAPPING:
                 logger.warning(
-                    f"Warning: Unknow severity value detected '{severity}'. Bypass to 'Medium' value",
+                    "Warning: Unknow severity value detected '%s'. Bypass to 'Medium' value", severity,
                 )
                 severity = "Medium"
         else:
@@ -329,7 +329,7 @@ def get_finding_from_vulnerability(
         if component_name is None:
             logger.warning(
-                f"component_name was None for File: {dependency_filename}, using dependency file name instead.",
+                "component_name was None for File: %s, using dependency file name instead.", dependency_filename,
             )
             component_name = dependency_filename
diff --git a/dojo/tools/factory.py b/dojo/tools/factory.py
index b6f70033196..bbef84df731 100644
--- a/dojo/tools/factory.py
+++ b/dojo/tools/factory.py
@@ -24,7 +24,7 @@ def register(parser_type):

 def register_parser(scan_type, parser):
-    logger.debug(f"register scan_type:{scan_type} with parser:{parser}")
+    logger.debug("register scan_type:%s with parser:%s", scan_type, parser)
     # check double registration or registration with an existing key
     if scan_type in PARSERS:
         msg = f"Try to register an existing parser '{scan_type}'"
@@ -122,4 +122,4 @@ def requires_tool_type(scan_type):
             if isclass(attribute) and attribute_name.lower() == module_name.replace("_", "") + "parser":
                 register(attribute)
     except:
-        logger.exception(f"failed to load {module_name}")
+        logger.exception("failed to load %s", module_name)
diff --git a/dojo/tools/fortify/fpr_parser.py b/dojo/tools/fortify/fpr_parser.py
index f348aa265d6..dbb8365e626 100644
--- a/dojo/tools/fortify/fpr_parser.py
+++ b/dojo/tools/fortify/fpr_parser.py
@@ -68,19 +68,19 @@ def parse_related_data(self, root: Element, test: Test) -> None:
         related_data = FortifyRelatedData()
         for description in root.findall("Description", self.namespaces):
             class_id = description.attrib.get("classID")
-            logger.debug(f"Description: {class_id}")
+            logger.debug("Description: %s", class_id)
             if class_id:
                 related_data.descriptions[class_id] = self.parse_description_information(description)

         for snippet in root.find("Snippets", self.namespaces):
             snippet_id = snippet.attrib.get("id")
-            logger.debug(f"Snippet: {snippet_id}")
+            logger.debug("Snippet: %s", snippet_id)
             if snippet_id:
                 related_data.snippets[snippet_id] = self.parse_snippet_information(snippet)

         for rule in root.find("EngineData", self.namespaces).find("RuleInfo", self.namespaces):
             rule_id = rule.attrib.get("id")
-            logger.debug(f"Rule: {rule_id}")
+            logger.debug("Rule: %s", rule_id)
             if rule_id:
                 related_data.rules[rule_id] = self.parse_rule_information(rule.find("MetaInfo", self.namespaces))
         return related_data
@@ -95,11 +95,11 @@ def add_audit_log(self, related_data, audit_log: Element) -> None:
             if instance_id:
                 suppressed_string = issue.attrib.get("suppressed")
                 suppressed = suppressed_string.lower() == "true" if suppressed_string else False
-                logger.debug(f"Issue: {instance_id} - Suppressed: {suppressed}")
+                logger.debug("Issue: %s - Suppressed: %s", instance_id, suppressed)
                 related_data.suppressed[instance_id] = suppressed
                 threaded_comments = issue.find("ThreadedComments", self.namespaces_audit_log)
-                logger.debug(f"ThreadedComments: {threaded_comments}")
+                logger.debug("ThreadedComments: %s", threaded_comments)
                 if threaded_comments is not None:
                     related_data.threaded_comments[instance_id] = [self.get_comment_text(comment) for comment in threaded_comments.findall("Comment", self.namespaces_audit_log)]
         return related_data
@@ -295,7 +295,7 @@ def compute_severity(self, vulnerability, snippet, description, rule) -> str:
         # This comes from Fortify support documentation, requested in #11901
         likelihood = (accuracy * confidence * probability) / 25
         likelihood = round(likelihood, 1)
-        logger.debug(f"Impact: {impact}, Likelihood: {likelihood}")
+        logger.debug("Impact: %s, Likelihood: %s", impact, likelihood)

         if impact >= 2.5 and likelihood >= 2.5:
             return "Critical"
diff --git a/dojo/tools/qualys/csv_parser.py b/dojo/tools/qualys/csv_parser.py
index 49d31b1783d..b42a564c070 100644
--- a/dojo/tools/qualys/csv_parser.py
+++ b/dojo/tools/qualys/csv_parser.py
@@ -113,9 +113,9 @@ def _extract_cvss_vectors(cvss_base, cvss_temporal):
         try:
             cvss_vector += re.search(vector_pattern, cvss_base).group(1)
         except IndexError:
-            _logger.error(f"CVSS3 Base Vector not found in {cvss_base}")
+            _logger.error("CVSS3 Base Vector not found in %s", cvss_base)
         except AttributeError:
-            _logger.error(f"CVSS3 Base Vector not found in {cvss_base}")
+            _logger.error("CVSS3 Base Vector not found in %s", cvss_base)
         if cvss_temporal:
             try:
                 cvss_temporal_vector = re.search(
@@ -125,11 +125,11 @@ def _extract_cvss_vectors(cvss_base, cvss_temporal):
                 cvss_vector += cvss_temporal_vector
             except IndexError:
                 _logger.error(
-                    f"CVSS3 Temporal Vector not found in {cvss_base}",
+                    "CVSS3 Temporal Vector not found in %s", cvss_base,
                 )
             except AttributeError:
                 _logger.error(
-                    f"CVSS3 Temporal Vector not found in {cvss_base}",
+                    "CVSS3 Temporal Vector not found in %s", cvss_base,
                 )
     return cvss_vector
diff --git a/dojo/tools/twistlock/parser.py b/dojo/tools/twistlock/parser.py
index 3042baa2e8f..99f2a8afeed 100644
--- a/dojo/tools/twistlock/parser.py
+++ b/dojo/tools/twistlock/parser.py
@@ -41,7 +41,7 @@ def parse_issue(self, row, test):
                 # Handle alternative formats
                 finding_date = datetime.strptime(date_str[:10], "%Y-%m-%d").date()
             except ValueError:
-                logger.warning(f"Could not parse date: {date_str}")
+                logger.warning("Could not parse date: %s", date_str)

         # Build container/image metadata for impact field (Item 3)
         impact_parts = []
diff --git a/dojo/tools/wiz/parser.py b/dojo/tools/wiz/parser.py
index c869e1cac2e..41a22c3c616 100644
--- a/dojo/tools/wiz/parser.py
+++ b/dojo/tools/wiz/parser.py
@@ -195,7 +195,7 @@ def _parse_tags(self, tags: str) -> list[str]:
     def _validate_severities(self, severity: str) -> str:
         """Ensure the supplied severity fits what DefectDojo is expecting."""
         if severity not in SEVERITIES:
-            logger.error(f"Severity is not supported: {severity}")
+            logger.error("Severity is not supported: %s", severity)
             # Default to Info severity
             return "Info"
         return severity
@@ -260,6 +260,6 @@ def parse_wiz_datetime(row: dict, column: str) -> datetime:
         )

     if not parsed_value:
-        logger.warning(f"Unable to parse Resolved Time: {value}")
+        logger.warning("Unable to parse Resolved Time: %s", value)

     return parsed_value
diff --git a/dojo/user/views.py b/dojo/user/views.py
index 998dd25a909..853791a319d 100644
--- a/dojo/user/views.py
+++ b/dojo/user/views.py
@@ -650,7 +650,7 @@ def clean(self):
             connection.open()
             connection.close()
         except Exception as e:
-            logger.error(f"SMTP Server Connection Failure: {e}")
+            logger.error("SMTP Server Connection Failure: %s", e)
             msg = "SMTP server is not configured correctly..."
             raise ValidationError(msg)
diff --git a/dojo/utils.py b/dojo/utils.py
index 991b6b84075..929fef76f0f 100644
--- a/dojo/utils.py
+++ b/dojo/utils.py
@@ -1956,7 +1956,7 @@ def _create_notifications():
             query = Q(active=True, verified=True, is_mitigated=False, duplicate=False)
         elif system_settings.enable_notify_sla_active:
             query = Q(active=True, is_mitigated=False, duplicate=False)
-        logger.debug(f"My query: {query}")
+        logger.debug("My query: %s", query)

         no_jira_findings = {}
         if system_settings.enable_notify_sla_jira_only:
@@ -2011,19 +2011,19 @@ def _create_notifications():
                     jira_count += 1
                     jira_instance = jira_helper.get_jira_instance(finding)
                     if jira_instance is not None:
-                        logger.debug(f"JIRA config for finding is {jira_instance}")
+                        logger.debug("JIRA config for finding is %s", jira_instance)
                         # global config or product config set, product level takes precedence
                         try:
                             # TODO: see new property from #2649 to then replace, somehow not working with prefetching though.
                             product_jira_sla_comment_enabled = jira_helper.get_jira_project(finding).product_jira_sla_notification
                         except Exception as e:
                             logger.error("The product is not linked to a JIRA configuration! Something is weird here.")
-                            logger.error(f"Error is: {e}")
+                            logger.error("Error is: %s", e)

                         jiraconfig_sla_notification_enabled = jira_instance.global_jira_sla_notification

                         if jiraconfig_sla_notification_enabled or product_jira_sla_comment_enabled:
-                            logger.debug(f"Global setting {jiraconfig_sla_notification_enabled} -- Product setting {product_jira_sla_comment_enabled}")
+                            logger.debug("Global setting %s -- Product setting %s", jiraconfig_sla_notification_enabled, product_jira_sla_comment_enabled)
                             do_jira_sla_comment = True
                             logger.debug(f"JIRA issue is {jira_issue.jira_key}")
@@ -2048,7 +2048,7 @@ def _create_notifications():
                     _add_notification(finding, "breaching")

         _create_notifications()
-        logger.info(f"SLA run results: Pre-breach: {pre_breach_count}, at-breach: {at_breach_count}, post-breach: {post_breach_count}, post-breach-no-notify: {post_breach_no_notify_count}, with-jira: {jira_count}, TOTAL: {total_count}")
+        logger.info("SLA run results: Pre-breach: %s, at-breach: %s, post-breach: %s, post-breach-no-notify: %s, with-jira: %s, TOTAL: %s", pre_breach_count, at_breach_count, post_breach_count, post_breach_no_notify_count, jira_count, total_count)
     except System_Settings.DoesNotExist:
         logger.info("Findings SLA is not enabled.")
diff --git a/dojo/validators.py b/dojo/validators.py
index aa3c4e5da5c..5554f0b0b53 100644
--- a/dojo/validators.py
+++ b/dojo/validators.py
@@ -27,7 +27,7 @@ def tag_validator(value: str | list[str], exception_class: Callable = Validation
             error_messages.append(f"Value must be a string or list of strings: {value} - {type(value)}.")

     if error_messages:
-        logger.debug(f"Tag validation failed: {error_messages}")
+        logger.debug("Tag validation failed: %s", error_messages)
         raise exception_class(error_messages)
diff --git a/tests/base_test_class.py b/tests/base_test_class.py
index 9e70739c27a..939c72265eb 100644
--- a/tests/base_test_class.py
+++ b/tests/base_test_class.py
@@ -331,7 +331,7 @@ def enable_github(self):
     def set_block_execution(self, *, block_execution=True):
         # we set the admin user (ourselves) to have block_execution checked
        # this will force dedupe to happen synchronously, among other things like notifications, rules, ...
-        logger.info(f"setting block execution to: {block_execution}")
+        logger.info("setting block execution to: %s", block_execution)
         driver = self.driver
         driver.get(self.base_url + "profile")
         if (
diff --git a/tests/notes_test.py b/tests/notes_test.py
index a569da5b052..f3aa1540b73 100644
--- a/tests/notes_test.py
+++ b/tests/notes_test.py
@@ -51,7 +51,7 @@ def create_private_note(self, driver, level):
         private_status = "(will not appear in report)" in text
         pass_test = note_present and private_status
         if not pass_test:
-            logger.info(f"Private note note created at the {level} level")
+            logger.info("Private note note created at the %s level", level)
         self.assertTrue(pass_test)

     def test_finding_note(self):
diff --git a/unittests/test_factory.py b/unittests/test_factory.py
index 06ad5344711..d03f7f4d35c 100644
--- a/unittests/test_factory.py
+++ b/unittests/test_factory.py
@@ -83,5 +83,5 @@ def test_parser_name_matches_module(self):
             if not found and module_name != "__pycache__":
                 missing_parsers.append(module_name)
         if len(missing_parsers) > 0:
-            logger.error(f"Parsers with invalid names: {missing_parsers}")
+            logger.error("Parsers with invalid names: %s", missing_parsers)
         self.assertEqual(0, len(missing_parsers))