[v2] location-importer.in: Conduct sanity checks per DROP list

Message ID a4378944-5729-231c-2428-529855ea2479@ipfire.org
State Accepted
Commit 5acac2a4060ec00d88eedadafe9f677be08e9528
Series [v2] location-importer.in: Conduct sanity checks per DROP list

Commit Message

Peter Müller Sept. 26, 2022, 6:26 p.m. UTC
Previously, the lack of distinction between the different DROP lists caused
only the last list processed to be persisted, since each list's import
deleted the overrides written by the lists before it. The second version of
this patch incorporates Michael's suggestions on the first version.

Tested-by: Peter Müller <peter.mueller@ipfire.org>
Signed-off-by: Peter Müller <peter.mueller@ipfire.org>
---
 src/scripts/location-importer.in | 74 +++++++++++++++++++-------------
 1 file changed, 44 insertions(+), 30 deletions(-)
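
For illustration, here is a minimal, self-contained sketch of the behaviour described above. It uses an in-memory SQLite table as a stand-in for the importer's PostgreSQL network_overrides table; the column set and the sample networks are simplified assumptions, not the real schema or code.

import sqlite3

db = sqlite3.connect(":memory:")
db.execute("CREATE TABLE network_overrides (network TEXT PRIMARY KEY, source TEXT, is_drop BOOL)")

lists = [
	("SPAMHAUS-DROP",   ["192.0.2.0/24"]),
	("SPAMHAUS-EDROP",  ["198.51.100.0/24"]),
	("SPAMHAUS-DROPV6", ["2001:db8::/32"]),
]

# Old behaviour: every list deleted *all* DROP overrides (one fixed source
# string shared by all lists) before inserting its own entries, so only the
# list processed last survived.
for name, networks in lists:
	db.execute("DELETE FROM network_overrides WHERE source = 'Spamhaus DROP lists'")
	for network in networks:
		db.execute("INSERT INTO network_overrides VALUES (?, 'Spamhaus DROP lists', ?)", (network, True))
print(db.execute("SELECT count(*) FROM network_overrides").fetchone()[0])  # -> 1

# New behaviour: each list is tagged with its own source name and only deletes
# the rows it wrote itself, so the entries of the other lists stay in place.
db.execute("DELETE FROM network_overrides")
for name, networks in lists:
	db.execute("DELETE FROM network_overrides WHERE source = ?", (name,))
	for network in networks:
		db.execute("INSERT INTO network_overrides VALUES (?, ?, ?)", (network, name, True))
print(db.execute("SELECT count(*) FROM network_overrides").fetchone()[0])  # -> 3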
  

Comments

Michael Tremer Sept. 27, 2022, 9:17 a.m. UTC | #1
Hello,

This looks a lot more Pythonic and okay to me.

I will merge this shortly.

-Michael

> On 26 Sep 2022, at 19:26, Peter Müller <peter.mueller@ipfire.org> wrote:
> 
> Previously, the lack of distinction between different DROP lists caused
> only the last one to be persisted. The second version of this patch
> incorporates suggestions from Michael on the first version.
> 
> Tested-by: Peter Müller <peter.mueller@ipfire.org>
> Signed-off-by: Peter Müller <peter.mueller@ipfire.org>
> [...]
  

Patch

diff --git a/src/scripts/location-importer.in b/src/scripts/location-importer.in
index 8d47497..d405eb2 100644
--- a/src/scripts/location-importer.in
+++ b/src/scripts/location-importer.in
@@ -1427,37 +1427,37 @@  class CLI(object):
 	def _update_overrides_for_spamhaus_drop(self):
 		downloader = location.importer.Downloader()
 
-		ip_urls = [
-					"https://www.spamhaus.org/drop/drop.txt",
-					"https://www.spamhaus.org/drop/edrop.txt",
-					"https://www.spamhaus.org/drop/dropv6.txt"
+		ip_lists = [
+					("SPAMHAUS-DROP", "https://www.spamhaus.org/drop/drop.txt"),
+					("SPAMHAUS-EDROP", "https://www.spamhaus.org/drop/edrop.txt"),
+					("SPAMHAUS-DROPV6", "https://www.spamhaus.org/drop/dropv6.txt")
 				]
 
-		asn_urls = [
-					"https://www.spamhaus.org/drop/asndrop.txt"
+		asn_lists = [
+					("SPAMHAUS-ASNDROP", "https://www.spamhaus.org/drop/asndrop.txt")
 				]
 
-		for url in ip_urls:
-			# Fetch IP list
+		for name, url in ip_lists:
+			# Fetch IP list from given URL
 			f = downloader.retrieve(url)
 
 			# Split into lines
 			fcontent = f.readlines()
 
-			# Conduct a very basic sanity check to rule out CDN issues causing bogus DROP
-			# downloads.
-			if len(fcontent) > 10:
-				self.db.execute("""
-					DELETE FROM autnum_overrides WHERE source = 'Spamhaus ASN-DROP list';
-					DELETE FROM network_overrides WHERE source = 'Spamhaus DROP lists';
-				""")
-			else:
-				log.error("Spamhaus DROP URL %s returned likely bogus file, ignored" % url)
-				continue
-
-			# Iterate through every line, filter comments and add remaining networks to
-			# the override table in case they are valid...
 			with self.db.transaction():
+				# Conduct a very basic sanity check to rule out CDN issues causing bogus DROP
+				# downloads.
+				if len(fcontent) > 10:
+					self.db.execute("""
+						DELETE FROM network_overrides WHERE source = '%s';
+					""" % name,
+					)
+				else:
+					log.error("%s (%s) returned likely bogus file, ignored" % (name, url))
+					continue
+
+				# Iterate through every line, filter comments and add remaining networks to
+				# the override table in case they are valid...
 				for sline in fcontent:
 					# The response is assumed to be encoded in UTF-8...
 					sline = sline.decode("utf-8")
@@ -1475,8 +1475,8 @@  class CLI(object):
 
 					# Sanitize parsed networks...
 					if not self._check_parsed_network(network):
-						log.warning("Skipping bogus network found in Spamhaus DROP URL %s: %s" % \
-							(url, network))
+						log.warning("Skipping bogus network found in %s (%s): %s" % \
+							(name, url, network))
 						continue
 
 					# Conduct SQL statement...
@@ -1488,17 +1488,31 @@  class CLI(object):
 						) VALUES (%s, %s, %s)
 						ON CONFLICT (network) DO UPDATE SET is_drop = True""",
 						"%s" % network,
-						"Spamhaus DROP lists",
+						name,
 						True
 					)
 
-		for url in asn_urls:
+		for name, url in asn_lists:
 			# Fetch URL
 			f = downloader.retrieve(url)
 
-			# Iterate through every line, filter comments and add remaining ASNs to
-			# the override table in case they are valid...
+			# Split into lines
+			fcontent = f.readlines()
+
 			with self.db.transaction():
+				# Conduct a very basic sanity check to rule out CDN issues causing bogus DROP
+				# downloads.
+				if len(fcontent) > 10:
+					self.db.execute("""
+						DELETE FROM autnum_overrides WHERE source = '%s';
+					""" % name,
+					)
+				else:
+					log.error("%s (%s) returned likely bogus file, ignored" % (name, url))
+					continue
+
+				# Iterate through every line, filter comments and add remaining ASNs to
+				# the override table in case they are valid...
 				for sline in f.readlines():
 					# The response is assumed to be encoded in UTF-8...
 					sline = sline.decode("utf-8")
@@ -1518,8 +1532,8 @@  class CLI(object):
 
 					# Filter invalid ASNs...
 					if not self._check_parsed_asn(asn):
-						log.warning("Skipping bogus ASN found in Spamhaus DROP URL %s: %s" % \
-							(url, asn))
+						log.warning("Skipping bogus ASN found in %s (%s): %s" % \
+							(name, url, asn))
 						continue
 
 					# Conduct SQL statement...
@@ -1531,7 +1545,7 @@  class CLI(object):
 						) VALUES (%s, %s, %s)
 						ON CONFLICT (number) DO UPDATE SET is_drop = True""",
 						"%s" % asn,
-						"Spamhaus ASN-DROP list",
+						name,
 						True
 					)
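
For completeness, a rough sketch of the per-list sanity check the patch introduces. The ten-line threshold, the per-source DELETE and the upsert follow the patch above; the downloader, the database handle and the network validation are simplified placeholders (sqlite3 and ipaddress instead of the importer's own classes), so this is illustrative only.

import ipaddress
import logging
import sqlite3

log = logging.getLogger("drop-import-sketch")

def import_drop_list(db, name, url, fetch):
	# fetch(url) stands in for downloader.retrieve(url).readlines() and is
	# expected to return the raw response as a list of byte strings.
	lines = fetch(url)

	with db:  # stand-in for "with self.db.transaction():"
		# Very basic sanity check: a genuine DROP list has far more than ten
		# lines, so a short response is most likely a CDN error page and must
		# not wipe the rows imported from an earlier, good download.
		if len(lines) > 10:
			db.execute("DELETE FROM network_overrides WHERE source = ?", (name,))
		else:
			log.error("%s (%s) returned likely bogus file, ignored", name, url)
			return

		for raw in lines:
			line = raw.decode("utf-8").strip()

			# Skip empty lines and comments; data lines look like
			# "192.0.2.0/24 ; SBL123456", so everything after ";" is dropped.
			if not line or line.startswith(";"):
				continue
			network = line.split(";")[0].strip()

			# Stand-in for self._check_parsed_network().
			try:
				ipaddress.ip_network(network)
			except ValueError:
				log.warning("Skipping bogus network found in %s (%s): %s", name, url, network)
				continue

			db.execute(
				"INSERT INTO network_overrides (network, source, is_drop) VALUES (?, ?, ?) "
				"ON CONFLICT (network) DO UPDATE SET is_drop = 1",
				(network, name, True),
			)

# Example wiring with an in-memory database and canned data:
db = sqlite3.connect(":memory:")
db.execute("CREATE TABLE network_overrides (network TEXT PRIMARY KEY, source TEXT, is_drop BOOL)")
canned = [b"; Spamhaus DROP List\n"] + [b"192.0.%d.0/24 ; SBL0000\n" % i for i in range(12)]
import_drop_list(db, "SPAMHAUS-DROP", "https://www.spamhaus.org/drop/drop.txt", lambda url: canned)
print(db.execute("SELECT count(*) FROM network_overrides").fetchone()[0])  # -> 12

Because the length check sits inside the same transaction as the per-source DELETE, a truncated download neither wipes nor replaces the rows imported from an earlier, good run of that list.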