git.ipfire.org Git - location/libloc.git/commitdiff
location-importer.in: Conduct sanity checks per DROP list
authorPeter Müller <peter.mueller@ipfire.org>
Mon, 26 Sep 2022 18:26:44 +0000 (18:26 +0000)
committerMichael Tremer <michael.tremer@ipfire.org>
Tue, 27 Sep 2022 09:17:46 +0000 (09:17 +0000)
Previously, the lack of distinction between the different DROP lists caused
only the last one processed to be persisted. This second version of the
patch incorporates Michael's suggestions on the first version.

Tested-by: Peter Müller <peter.mueller@ipfire.org>
Signed-off-by: Peter Müller <peter.mueller@ipfire.org>
Signed-off-by: Michael Tremer <michael.tremer@ipfire.org>
src/scripts/location-importer.in

index 8d4749709ea72acd8ebd1396b25f165251f84167..d405eb2bc42d8a9cda1c275777336b9de6c60cb6 100644 (file)
@@ -1427,37 +1427,37 @@ class CLI(object):
        def _update_overrides_for_spamhaus_drop(self):
                downloader = location.importer.Downloader()
 
-               ip_urls = [
-                                       "https://www.spamhaus.org/drop/drop.txt",
-                                       "https://www.spamhaus.org/drop/edrop.txt",
-                                       "https://www.spamhaus.org/drop/dropv6.txt"
+               ip_lists = [
+                                       ("SPAMHAUS-DROP", "https://www.spamhaus.org/drop/drop.txt"),
+                                       ("SPAMHAUS-EDROP", "https://www.spamhaus.org/drop/edrop.txt"),
+                                       ("SPAMHAUS-DROPV6", "https://www.spamhaus.org/drop/dropv6.txt")
                                ]
 
-               asn_urls = [
-                                       "https://www.spamhaus.org/drop/asndrop.txt"
+               asn_lists = [
+                                       ("SPAMHAUS-ASNDROP", "https://www.spamhaus.org/drop/asndrop.txt")
                                ]
 
-               for url in ip_urls:
-                       # Fetch IP list
+               for name, url in ip_lists:
+                       # Fetch IP list from given URL
                        f = downloader.retrieve(url)
 
                        # Split into lines
                        fcontent = f.readlines()
 
-                       # Conduct a very basic sanity check to rule out CDN issues causing bogus DROP
-                       # downloads.
-                       if len(fcontent) > 10:
-                               self.db.execute("""
-                                       DELETE FROM autnum_overrides WHERE source = 'Spamhaus ASN-DROP list';
-                                       DELETE FROM network_overrides WHERE source = 'Spamhaus DROP lists';
-                               """)
-                       else:
-                               log.error("Spamhaus DROP URL %s returned likely bogus file, ignored" % url)
-                               continue
-
-                       # Iterate through every line, filter comments and add remaining networks to
-                       # the override table in case they are valid...
                        with self.db.transaction():
+                               # Conduct a very basic sanity check to rule out CDN issues causing bogus DROP
+                               # downloads.
+                               if len(fcontent) > 10:
+                                       self.db.execute("""
+                                               DELETE FROM network_overrides WHERE source = '%s';
+                                       """ % name,
+                                       )
+                               else:
+                                       log.error("%s (%s) returned likely bogus file, ignored" % (name, url))
+                                       continue
+
+                               # Iterate through every line, filter comments and add remaining networks to
+                               # the override table in case they are valid...
                                for sline in fcontent:
                                        # The response is assumed to be encoded in UTF-8...
                                        sline = sline.decode("utf-8")
@@ -1475,8 +1475,8 @@ class CLI(object):
 
                                        # Sanitize parsed networks...
                                        if not self._check_parsed_network(network):
-                                               log.warning("Skipping bogus network found in Spamhaus DROP URL %s: %s" % \
-                                                       (url, network))
+                                               log.warning("Skipping bogus network found in %s (%s): %s" % \
+                                                       (name, url, network))
                                                continue
 
                                        # Conduct SQL statement...
@@ -1488,17 +1488,31 @@ class CLI(object):
                                                ) VALUES (%s, %s, %s)
                                                ON CONFLICT (network) DO UPDATE SET is_drop = True""",
                                                "%s" % network,
-                                               "Spamhaus DROP lists",
+                                               name,
                                                True
                                        )
 
-               for url in asn_urls:
+               for name, url in asn_lists:
                        # Fetch URL
                        f = downloader.retrieve(url)
 
-                       # Iterate through every line, filter comments and add remaining ASNs to
-                       # the override table in case they are valid...
+                       # Split into lines
+                       fcontent = f.readlines()
+
                        with self.db.transaction():
+                               # Conduct a very basic sanity check to rule out CDN issues causing bogus DROP
+                               # downloads.
+                               if len(fcontent) > 10:
+                                       self.db.execute("""
+                                               DELETE FROM autnum_overrides WHERE source = '%s';
+                                       """ % name,
+                                       )
+                               else:
+                                       log.error("%s (%s) returned likely bogus file, ignored" % (name, url))
+                                       continue
+
+                               # Iterate through every line, filter comments and add remaining ASNs to
+                               # the override table in case they are valid...
                                for sline in f.readlines():
                                        # The response is assumed to be encoded in UTF-8...
                                        sline = sline.decode("utf-8")
@@ -1518,8 +1532,8 @@ class CLI(object):
 
                                        # Filter invalid ASNs...
                                        if not self._check_parsed_asn(asn):
-                                               log.warning("Skipping bogus ASN found in Spamhaus DROP URL %s: %s" % \
-                                                       (url, asn))
+                                               log.warning("Skipping bogus ASN found in %s (%s): %s" % \
+                                                       (name, url, asn))
                                                continue
 
                                        # Conduct SQL statement...
@@ -1531,7 +1545,7 @@ class CLI(object):
                                                ) VALUES (%s, %s, %s)
                                                ON CONFLICT (number) DO UPDATE SET is_drop = True""",
                                                "%s" % asn,
-                                               "Spamhaus ASN-DROP list",
+                                               name,
                                                True
                                        )