Skip to content

Commit

Permalink
removing unused parallel arguments (#492)
Browse files Browse the repository at this point in the history
  • Loading branch information
Szasza authored Mar 25, 2024
1 parent a915385 commit 2cde116
Show file tree
Hide file tree
Showing 3 changed files with 16 additions and 38 deletions.
43 changes: 12 additions & 31 deletions parsedmarc/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ class InvalidForensicReport(InvalidDMARCReport):


def _parse_report_record(record, ip_db_path=None, offline=False,
nameservers=None, dns_timeout=2.0, parallel=False):
nameservers=None, dns_timeout=2.0):
"""
Converts a record from a DMARC aggregate report into a more consistent
format
Expand All @@ -95,8 +95,7 @@ def _parse_report_record(record, ip_db_path=None, offline=False,
ip_db_path=ip_db_path,
offline=offline,
nameservers=nameservers,
timeout=dns_timeout,
parallel=parallel)
timeout=dns_timeout)
new_record["source"] = new_record_source
new_record["count"] = int(record["row"]["count"])
policy_evaluated = record["row"]["policy_evaluated"].copy()
Expand Down Expand Up @@ -389,7 +388,7 @@ def parsed_smtp_tls_reports_to_csv(reports):

def parse_aggregate_report_xml(xml, ip_db_path=None, offline=False,
nameservers=None, timeout=2.0,
parallel=False, keep_alive=None):
keep_alive=None):
"""Parses a DMARC XML report string and returns a consistent OrderedDict
Args:
Expand All @@ -399,7 +398,6 @@ def parse_aggregate_report_xml(xml, ip_db_path=None, offline=False,
nameservers (list): A list of one or more nameservers to use
(Cloudflare's public DNS resolvers by default)
timeout (float): Sets the DNS timeout in seconds
parallel (bool): Parallel processing
keep_alive (callable): Keep alive function
Returns:
Expand Down Expand Up @@ -521,17 +519,15 @@ def parse_aggregate_report_xml(xml, ip_db_path=None, offline=False,
ip_db_path=ip_db_path,
offline=offline,
nameservers=nameservers,
dns_timeout=timeout,
parallel=parallel)
dns_timeout=timeout)
records.append(report_record)

else:
report_record = _parse_report_record(report["record"],
ip_db_path=ip_db_path,
offline=offline,
nameservers=nameservers,
dns_timeout=timeout,
parallel=parallel)
dns_timeout=timeout)
records.append(report_record)

new_report["records"] = records
Expand Down Expand Up @@ -613,7 +609,6 @@ def extract_report(input_):
def parse_aggregate_report_file(_input, offline=False, ip_db_path=None,
nameservers=None,
dns_timeout=2.0,
parallel=False,
keep_alive=None):
"""Parses a file at the given path, a file-like object. or bytes as an
aggregate DMARC report
Expand All @@ -625,7 +620,6 @@ def parse_aggregate_report_file(_input, offline=False, ip_db_path=None,
nameservers (list): A list of one or more nameservers to use
(Cloudflare's public DNS resolvers by default)
dns_timeout (float): Sets the DNS timeout in seconds
parallel (bool): Parallel processing
keep_alive (callable): Keep alive function
Returns:
Expand All @@ -642,7 +636,6 @@ def parse_aggregate_report_file(_input, offline=False, ip_db_path=None,
offline=offline,
nameservers=nameservers,
timeout=dns_timeout,
parallel=parallel,
keep_alive=keep_alive)


Expand Down Expand Up @@ -788,8 +781,7 @@ def parsed_aggregate_reports_to_csv(reports):
def parse_forensic_report(feedback_report, sample, msg_date,
offline=False, ip_db_path=None,
nameservers=None, dns_timeout=2.0,
strip_attachment_payloads=False,
parallel=False):
strip_attachment_payloads=False):
"""
Converts a DMARC forensic report and sample to a ``OrderedDict``
Expand All @@ -804,7 +796,6 @@ def parse_forensic_report(feedback_report, sample, msg_date,
dns_timeout (float): Sets the DNS timeout in seconds
strip_attachment_payloads (bool): Remove attachment payloads from
forensic report results
parallel (bool): Parallel processing
Returns:
OrderedDict: A parsed report and sample
Expand Down Expand Up @@ -850,8 +841,7 @@ def parse_forensic_report(feedback_report, sample, msg_date,
ip_db_path=ip_db_path,
offline=offline,
nameservers=nameservers,
timeout=dns_timeout,
parallel=parallel)
timeout=dns_timeout)
parsed_report["source"] = parsed_report_source
del parsed_report["source_ip"]

Expand Down Expand Up @@ -979,7 +969,7 @@ def parsed_forensic_reports_to_csv(reports):
def parse_report_email(input_, offline=False, ip_db_path=None,
nameservers=None, dns_timeout=2.0,
strip_attachment_payloads=False,
parallel=False, keep_alive=None):
keep_alive=None):
"""
Parses a DMARC report from an email
Expand All @@ -991,7 +981,6 @@ def parse_report_email(input_, offline=False, ip_db_path=None,
dns_timeout (float): Sets the DNS timeout in seconds
strip_attachment_payloads (bool): Remove attachment payloads from
forensic report results
parallel (bool): Parallel processing
keep_alive (callable): keep alive function
Returns:
Expand Down Expand Up @@ -1099,7 +1088,6 @@ def parse_report_email(input_, offline=False, ip_db_path=None,
offline=offline,
nameservers=ns,
timeout=dns_timeout,
parallel=parallel,
keep_alive=keep_alive)
result = OrderedDict([("report_type", "aggregate"),
("report", aggregate_report)])
Expand Down Expand Up @@ -1128,8 +1116,7 @@ def parse_report_email(input_, offline=False, ip_db_path=None,
offline=offline,
nameservers=nameservers,
dns_timeout=dns_timeout,
strip_attachment_payloads=strip_attachment_payloads,
parallel=parallel)
strip_attachment_payloads=strip_attachment_payloads)
except InvalidForensicReport as e:
error = 'Message with subject "{0}" ' \
'is not a valid ' \
Expand All @@ -1150,7 +1137,7 @@ def parse_report_email(input_, offline=False, ip_db_path=None,

def parse_report_file(input_, nameservers=None, dns_timeout=2.0,
strip_attachment_payloads=False, ip_db_path=None,
offline=False, parallel=False, keep_alive=None):
offline=False, keep_alive=None):
    """Parses a DMARC aggregate or forensic file at the given path, a
    file-like object, or bytes
Expand All @@ -1163,7 +1150,6 @@ def parse_report_file(input_, nameservers=None, dns_timeout=2.0,
forensic report results
ip_db_path (str): Path to a MMDB file from MaxMind or DBIP
offline (bool): Do not make online queries for geolocation or DNS
parallel (bool): Parallel processing
keep_alive (callable): Keep alive function
Returns:
Expand All @@ -1185,7 +1171,6 @@ def parse_report_file(input_, nameservers=None, dns_timeout=2.0,
offline=offline,
nameservers=nameservers,
dns_timeout=dns_timeout,
parallel=parallel,
keep_alive=keep_alive)
results = OrderedDict([("report_type", "aggregate"),
("report", report)])
Expand All @@ -1203,7 +1188,6 @@ def parse_report_file(input_, nameservers=None, dns_timeout=2.0,
nameservers=nameservers,
dns_timeout=dns_timeout,
strip_attachment_payloads=sa,
parallel=parallel,
keep_alive=keep_alive)
except InvalidDMARCReport:
raise ParserError("Not a valid report")
Expand All @@ -1213,8 +1197,7 @@ def parse_report_file(input_, nameservers=None, dns_timeout=2.0,
def get_dmarc_reports_from_mbox(input_, nameservers=None, dns_timeout=2.0,
strip_attachment_payloads=False,
ip_db_path=None,
offline=False,
parallel=False):
offline=False):
"""Parses a mailbox in mbox format containing e-mails with attached
DMARC reports
Expand All @@ -1227,7 +1210,6 @@ def get_dmarc_reports_from_mbox(input_, nameservers=None, dns_timeout=2.0,
forensic report results
ip_db_path (str): Path to a MMDB file from MaxMind or DBIP
offline (bool): Do not make online queries for geolocation or DNS
parallel (bool): Parallel processing
Returns:
OrderedDict: Lists of ``aggregate_reports`` and ``forensic_reports``
Expand Down Expand Up @@ -1255,8 +1237,7 @@ def get_dmarc_reports_from_mbox(input_, nameservers=None, dns_timeout=2.0,
offline=offline,
nameservers=nameservers,
dns_timeout=dns_timeout,
strip_attachment_payloads=sa,
parallel=parallel)
strip_attachment_payloads=sa)
if parsed_email["report_type"] == "aggregate":
aggregate_reports.append(parsed_email["report"])
elif parsed_email["report_type"] == "forensic":
Expand Down
8 changes: 3 additions & 5 deletions parsedmarc/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,16 +41,15 @@ def _str_to_list(s):


def cli_parse(file_path, sa, nameservers, dns_timeout,
ip_db_path, offline, conn, parallel=False):
ip_db_path, offline, conn):
"""Separated this function for multiprocessing"""
try:
file_results = parse_report_file(file_path,
ip_db_path=ip_db_path,
offline=offline,
nameservers=nameservers,
dns_timeout=dns_timeout,
strip_attachment_payloads=sa,
parallel=parallel)
strip_attachment_payloads=sa)
conn.send([file_results, file_path])
except ParserError as error:
conn.send([error, file_path])
Expand Down Expand Up @@ -1196,8 +1195,7 @@ def process_reports(reports_):
dns_timeout=opts.dns_timeout,
strip_attachment_payloads=strip,
ip_db_path=opts.ip_db_path,
offline=opts.offline,
parallel=False)
offline=opts.offline)
aggregate_reports += reports["aggregate_reports"]
forensic_reports += reports["forensic_reports"]
smtp_tls_reports += reports["smtp_tls_reports"]
Expand Down
3 changes: 1 addition & 2 deletions parsedmarc/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -294,7 +294,7 @@ def get_ip_address_country(ip_address, db_path=None):


def get_ip_address_info(ip_address, ip_db_path=None, cache=None, offline=False,
nameservers=None, timeout=2.0, parallel=False):
nameservers=None, timeout=2.0):
"""
Returns reverse DNS and country information for the given IP address
Expand All @@ -306,7 +306,6 @@ def get_ip_address_info(ip_address, ip_db_path=None, cache=None, offline=False,
nameservers (list): A list of one or more nameservers to use
(Cloudflare's public DNS resolvers by default)
timeout (float): Sets the DNS timeout in seconds
parallel (bool): parallel processing
Returns:
OrderedDict: ``ip_address``, ``reverse_dns``
Expand Down

0 comments on commit 2cde116

Please sign in to comment.