2 ###############################################################################
4 # libloc - A library to determine the location of someone on the Internet #
6 # Copyright (C) 2020-2024 IPFire Development Team <info@ipfire.org> #
8 # This library is free software; you can redistribute it and/or #
9 # modify it under the terms of the GNU Lesser General Public #
10 # License as published by the Free Software Foundation; either #
11 # version 2.1 of the License, or (at your option) any later version. #
13 # This library is distributed in the hope that it will be useful, #
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of #
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU #
16 # Lesser General Public License for more details. #
18 ###############################################################################
21 import concurrent
.futures
34 # Load our location module
36 import location
.database
37 import location
.importer
38 from location
.i18n
import _
41 log
= logging
.getLogger("location.importer")
51 TRANSLATED_COUNTRIES
= {
52 # When people say UK, they mean GB
56 IGNORED_COUNTRIES
= set((
60 # Some people use ZZ to say "no country" or to hide the country
# Configure the CSV parser for ARIN
csv.register_dialect(
	"arin",
	delimiter=",",
	quotechar="\"",
	quoting=csv.QUOTE_ALL,
)
69 parser
= argparse
.ArgumentParser(
70 description
=_("Location Importer Command Line Interface"),
72 subparsers
= parser
.add_subparsers()
74 # Global configuration flags
75 parser
.add_argument("--debug", action
="store_true",
76 help=_("Enable debug output"))
77 parser
.add_argument("--quiet", action
="store_true",
78 help=_("Enable quiet mode"))
81 parser
.add_argument("--version", action
="version",
82 version
="%(prog)s @VERSION@")
85 parser
.add_argument("--database-host", required
=True,
86 help=_("Database Hostname"), metavar
=_("HOST"))
87 parser
.add_argument("--database-name", required
=True,
88 help=_("Database Name"), metavar
=_("NAME"))
89 parser
.add_argument("--database-username", required
=True,
90 help=_("Database Username"), metavar
=_("USERNAME"))
91 parser
.add_argument("--database-password", required
=True,
92 help=_("Database Password"), metavar
=_("PASSWORD"))
95 write
= subparsers
.add_parser("write", help=_("Write database to file"))
96 write
.set_defaults(func
=self
.handle_write
)
97 write
.add_argument("file", nargs
=1, help=_("Database File"))
98 write
.add_argument("--signing-key", nargs
="?", type=open, help=_("Signing Key"))
99 write
.add_argument("--backup-signing-key", nargs
="?", type=open, help=_("Backup Signing Key"))
100 write
.add_argument("--vendor", nargs
="?", help=_("Sets the vendor"))
101 write
.add_argument("--description", nargs
="?", help=_("Sets a description"))
102 write
.add_argument("--license", nargs
="?", help=_("Sets the license"))
103 write
.add_argument("--version", type=int, help=_("Database Format Version"))
106 update_whois
= subparsers
.add_parser("update-whois", help=_("Update WHOIS Information"))
107 update_whois
.set_defaults(func
=self
.handle_update_whois
)
109 # Update announcements
110 update_announcements
= subparsers
.add_parser("update-announcements",
111 help=_("Update BGP Annoucements"))
112 update_announcements
.set_defaults(func
=self
.handle_update_announcements
)
113 update_announcements
.add_argument("server", nargs
=1,
114 help=_("Route Server to connect to"), metavar
=_("SERVER"))
117 update_geofeeds
= subparsers
.add_parser("update-geofeeds",
118 help=_("Update Geofeeds"))
119 update_geofeeds
.set_defaults(func
=self
.handle_update_geofeeds
)
122 update_feeds
= subparsers
.add_parser("update-feeds",
123 help=_("Update Feeds"))
124 update_feeds
.add_argument("feeds", nargs
="*",
125 help=_("Only update these feeds"))
126 update_feeds
.set_defaults(func
=self
.handle_update_feeds
)
129 update_overrides
= subparsers
.add_parser("update-overrides",
130 help=_("Update overrides"),
132 update_overrides
.add_argument(
133 "files", nargs
="+", help=_("Files to import"),
135 update_overrides
.set_defaults(func
=self
.handle_update_overrides
)
138 import_countries
= subparsers
.add_parser("import-countries",
139 help=_("Import countries"),
141 import_countries
.add_argument("file", nargs
=1, type=argparse
.FileType("r"),
142 help=_("File to import"))
143 import_countries
.set_defaults(func
=self
.handle_import_countries
)
145 args
= parser
.parse_args()
149 location
.logger
.set_level(logging
.DEBUG
)
151 location
.logger
.set_level(logging
.WARNING
)
153 # Print usage if no action was given
154 if not "func" in args
:
161 # Parse command line arguments
162 args
= self
.parse_cli()
164 # Initialise database
165 self
.db
= self
._setup
_database
(args
)
168 ret
= args
.func(args
)
170 # Return with exit code
174 # Otherwise just exit
177 def _setup_database(self
, ns
):
179 Initialise the database
181 # Connect to database
182 db
= location
.database
.Connection(
183 host
=ns
.database_host
, database
=ns
.database_name
,
184 user
=ns
.database_username
, password
=ns
.database_password
,
187 with db
.transaction():
190 CREATE TABLE IF NOT EXISTS announcements(network inet, autnum bigint,
191 first_seen_at timestamp without time zone DEFAULT CURRENT_TIMESTAMP,
192 last_seen_at timestamp without time zone DEFAULT CURRENT_TIMESTAMP);
193 CREATE UNIQUE INDEX IF NOT EXISTS announcements_networks ON announcements(network);
194 CREATE INDEX IF NOT EXISTS announcements_family ON announcements(family(network));
195 CREATE INDEX IF NOT EXISTS announcements_search ON announcements USING GIST(network inet_ops);
198 CREATE TABLE IF NOT EXISTS autnums(number bigint, name text NOT NULL);
199 ALTER TABLE autnums ADD COLUMN IF NOT EXISTS source text;
200 CREATE UNIQUE INDEX IF NOT EXISTS autnums_number ON autnums(number);
203 CREATE TABLE IF NOT EXISTS countries(
204 country_code text NOT NULL, name text NOT NULL, continent_code text NOT NULL);
205 CREATE UNIQUE INDEX IF NOT EXISTS countries_country_code ON countries(country_code);
208 CREATE TABLE IF NOT EXISTS networks(network inet, country text);
209 ALTER TABLE networks ADD COLUMN IF NOT EXISTS original_countries text[];
210 ALTER TABLE networks ADD COLUMN IF NOT EXISTS source text;
211 CREATE UNIQUE INDEX IF NOT EXISTS networks_network ON networks(network);
212 CREATE INDEX IF NOT EXISTS networks_family ON networks USING BTREE(family(network));
213 CREATE INDEX IF NOT EXISTS networks_search ON networks USING GIST(network inet_ops);
216 CREATE TABLE IF NOT EXISTS geofeeds(
217 id serial primary key,
219 status integer default null,
220 updated_at timestamp without time zone default null
222 ALTER TABLE geofeeds ADD COLUMN IF NOT EXISTS error text;
223 CREATE UNIQUE INDEX IF NOT EXISTS geofeeds_unique
225 CREATE TABLE IF NOT EXISTS geofeed_networks(
226 geofeed_id integer references geofeeds(id) on delete cascade,
232 CREATE INDEX IF NOT EXISTS geofeed_networks_geofeed_id
233 ON geofeed_networks(geofeed_id);
234 CREATE INDEX IF NOT EXISTS geofeed_networks_search
235 ON geofeed_networks USING GIST(network inet_ops);
236 CREATE TABLE IF NOT EXISTS network_geofeeds(network inet, url text);
237 CREATE UNIQUE INDEX IF NOT EXISTS network_geofeeds_unique
238 ON network_geofeeds(network);
239 CREATE INDEX IF NOT EXISTS network_geofeeds_search
240 ON network_geofeeds USING GIST(network inet_ops);
241 CREATE INDEX IF NOT EXISTS network_geofeeds_url
242 ON network_geofeeds(url);
245 CREATE TABLE IF NOT EXISTS autnum_feeds(
246 number bigint NOT NULL,
247 source text NOT NULL,
250 is_anonymous_proxy boolean,
251 is_satellite_provider boolean,
255 CREATE UNIQUE INDEX IF NOT EXISTS autnum_feeds_unique
256 ON autnum_feeds(number, source);
258 CREATE TABLE IF NOT EXISTS network_feeds(
259 network inet NOT NULL,
260 source text NOT NULL,
262 is_anonymous_proxy boolean,
263 is_satellite_provider boolean,
267 CREATE UNIQUE INDEX IF NOT EXISTS network_feeds_unique
268 ON network_feeds(network, source);
269 CREATE INDEX IF NOT EXISTS network_feeds_search
270 ON network_feeds USING GIST(network inet_ops);
273 CREATE TABLE IF NOT EXISTS autnum_overrides(
274 number bigint NOT NULL,
277 is_anonymous_proxy boolean,
278 is_satellite_provider boolean,
281 CREATE UNIQUE INDEX IF NOT EXISTS autnum_overrides_number
282 ON autnum_overrides(number);
283 ALTER TABLE autnum_overrides ADD COLUMN IF NOT EXISTS is_drop boolean;
284 ALTER TABLE autnum_overrides DROP COLUMN IF EXISTS source;
286 CREATE TABLE IF NOT EXISTS network_overrides(
287 network inet NOT NULL,
289 is_anonymous_proxy boolean,
290 is_satellite_provider boolean,
293 CREATE UNIQUE INDEX IF NOT EXISTS network_overrides_network
294 ON network_overrides(network);
295 CREATE INDEX IF NOT EXISTS network_overrides_search
296 ON network_overrides USING GIST(network inet_ops);
297 ALTER TABLE network_overrides ADD COLUMN IF NOT EXISTS is_drop boolean;
298 ALTER TABLE network_overrides DROP COLUMN IF EXISTS source;
303 def fetch_countries(self
):
305 Returns a list of all countries on the list
307 # Fetch all valid country codes to check parsed networks aganist...
308 countries
= self
.db
.query("SELECT country_code FROM countries ORDER BY country_code")
310 return set((country
.country_code
for country
in countries
))
312 def handle_write(self
, ns
):
314 Compiles a database in libloc format out of what is in the database
317 writer
= location
.Writer(ns
.signing_key
, ns
.backup_signing_key
)
321 writer
.vendor
= ns
.vendor
324 writer
.description
= ns
.description
327 writer
.license
= ns
.license
329 # Add all Autonomous Systems
330 log
.info("Writing Autonomous Systems...")
332 # Select all ASes with a name
333 rows
= self
.db
.query("""
335 autnums.number AS number,
343 autnum_overrides overrides ON autnums.number = overrides.number
349 # Skip AS without names
353 a
= writer
.add_as(row
.number
)
357 log
.info("Writing networks...")
359 # Select all known networks
360 rows
= self
.db
.query("""
361 WITH known_networks AS (
362 SELECT network FROM announcements
364 SELECT network FROM networks
366 SELECT network FROM network_feeds
368 SELECT network FROM network_overrides
370 SELECT network FROM geofeed_networks
373 ordered_networks AS (
375 known_networks.network AS network,
376 announcements.autnum AS autnum,
377 networks.country AS country,
379 -- Must be part of returned values for ORDER BY clause
380 masklen(announcements.network) AS sort_a,
381 masklen(networks.network) AS sort_b
385 announcements ON known_networks.network <<= announcements.network
387 networks ON known_networks.network <<= networks.network
389 known_networks.network,
394 -- Return a list of those networks enriched with all
395 -- other information that we store in the database
397 DISTINCT ON (network)
407 network_overrides overrides
409 networks.network <<= overrides.network
411 masklen(overrides.network) DESC
418 autnum_overrides overrides
420 networks.autnum = overrides.number
428 networks.network <<= feeds.network
430 masklen(feeds.network) DESC
439 networks.autnum = feeds.number
446 geofeed_networks.country AS country
450 -- Join the data from the geofeeds
452 geofeeds ON network_geofeeds.url = geofeeds.url
454 geofeed_networks ON geofeeds.id = geofeed_networks.geofeed_id
456 -- Check whether we have a geofeed for this network
458 networks.network <<= network_geofeeds.network
460 networks.network <<= geofeed_networks.network
462 -- Filter for the best result
464 masklen(geofeed_networks.network) DESC
476 network_overrides overrides
478 networks.network <<= overrides.network
480 masklen(overrides.network) DESC
489 networks.network <<= feeds.network
491 masklen(feeds.network) DESC
500 networks.autnum = feeds.number
509 autnum_overrides overrides
511 networks.autnum = overrides.number
514 ) AS is_anonymous_proxy,
518 is_satellite_provider
520 network_overrides overrides
522 networks.network <<= overrides.network
524 masklen(overrides.network) DESC
529 is_satellite_provider
533 networks.network <<= feeds.network
535 masklen(feeds.network) DESC
540 is_satellite_provider
544 networks.autnum = feeds.number
551 is_satellite_provider
553 autnum_overrides overrides
555 networks.autnum = overrides.number
558 ) AS is_satellite_provider,
564 network_overrides overrides
566 networks.network <<= overrides.network
568 masklen(overrides.network) DESC
577 networks.network <<= feeds.network
579 masklen(feeds.network) DESC
588 networks.autnum = feeds.number
597 autnum_overrides overrides
599 networks.autnum = overrides.number
608 network_overrides overrides
610 networks.network <<= overrides.network
612 masklen(overrides.network) DESC
621 networks.network <<= feeds.network
623 masklen(feeds.network) DESC
632 networks.autnum = feeds.number
641 autnum_overrides overrides
643 networks.autnum = overrides.number
648 ordered_networks networks
652 network
= writer
.add_network(row
.network
)
656 network
.country_code
= row
.country
660 network
.asn
= row
.autnum
663 if row
.is_anonymous_proxy
:
664 network
.set_flag(location
.NETWORK_FLAG_ANONYMOUS_PROXY
)
666 if row
.is_satellite_provider
:
667 network
.set_flag(location
.NETWORK_FLAG_SATELLITE_PROVIDER
)
670 network
.set_flag(location
.NETWORK_FLAG_ANYCAST
)
673 network
.set_flag(location
.NETWORK_FLAG_DROP
)
676 log
.info("Writing countries...")
677 rows
= self
.db
.query("SELECT * FROM countries ORDER BY country_code")
680 c
= writer
.add_country(row
.country_code
)
681 c
.continent_code
= row
.continent_code
684 # Write everything to file
685 log
.info("Writing database to file...")
689 def handle_update_whois(self
, ns
):
690 downloader
= location
.importer
.Downloader()
692 # Did we run successfully?
695 # Fetch all valid country codes to check parsed networks against
696 countries
= self
.fetch_countries()
698 # Check if we have countries
700 log
.error("Please import countries before importing any WHOIS data")
703 # Iterate over all potential sources
704 for source
in sorted(location
.importer
.SOURCES
):
705 with self
.db
.transaction():
706 # Create some temporary tables to store parsed data
708 CREATE TEMPORARY TABLE _autnums(number integer NOT NULL,
709 organization text NOT NULL, source text NOT NULL) ON COMMIT DROP;
710 CREATE UNIQUE INDEX _autnums_number ON _autnums(number);
712 CREATE TEMPORARY TABLE _organizations(handle text NOT NULL,
713 name text NOT NULL, source text NOT NULL) ON COMMIT DROP;
714 CREATE UNIQUE INDEX _organizations_handle ON _organizations(handle);
716 CREATE TEMPORARY TABLE _rirdata(network inet NOT NULL, country text,
717 original_countries text[] NOT NULL, source text NOT NULL)
719 CREATE INDEX _rirdata_search ON _rirdata
720 USING BTREE(family(network), masklen(network));
721 CREATE UNIQUE INDEX _rirdata_network ON _rirdata(network);
724 # Remove all previously imported content
725 self
.db
.execute("DELETE FROM autnums WHERE source = %s", source
)
726 self
.db
.execute("DELETE FROM networks WHERE source = %s", source
)
729 # Fetch WHOIS sources
730 for url
in location
.importer
.WHOIS_SOURCES
.get(source
, []):
731 for block
in downloader
.request_blocks(url
):
732 self
._parse
_block
(block
, source
, countries
)
734 # Fetch extended sources
735 for url
in location
.importer
.EXTENDED_SOURCES
.get(source
, []):
736 for line
in downloader
.request_lines(url
):
737 self
._parse
_line
(line
, source
, countries
)
738 except urllib
.error
.URLError
as e
:
739 log
.error("Could not retrieve data from %s: %s" % (source
, e
))
742 # Continue with the next source
745 # Process all parsed networks from every RIR we happen to have access to,
746 # insert the largest network chunks into the networks table immediately...
747 families
= self
.db
.query("""
749 family(network) AS family
757 for family
in (row
.family
for row
in families
):
758 # Fetch the smallest mask length in our data set
759 smallest
= self
.db
.get("""
789 masklen(network) = %s
798 # ... determine any other prefixes for this network family, ...
799 prefixes
= self
.db
.query("""
801 DISTINCT masklen(network) AS prefix
812 # ... and insert networks with this prefix in case they provide additional
813 # information (i. e. subnet of a larger chunk with a different country)
814 for prefix
in (row
.prefix
for row
in prefixes
):
820 _rirdata.original_countries,
825 family(_rirdata.network) = %s
827 masklen(_rirdata.network) = %s
831 DISTINCT ON (c.network)
834 c.original_countries,
836 masklen(networks.network),
837 networks.country AS parent_country
843 c.network << networks.network
846 masklen(networks.network) DESC NULLS LAST
849 networks(network, country, original_countries, source)
858 parent_country IS NULL
860 country <> parent_country
861 ON CONFLICT DO NOTHING
876 _organizations.source
880 _organizations ON _autnums.organization = _organizations.handle
886 SET name = excluded.name
890 # Download and import (technical) AS names from ARIN
891 with self
.db
.transaction():
892 self
._import
_as
_names
_from
_arin
(downloader
)
894 # Return a non-zero exit code for errors
895 return 1 if error
else 0
897 def _check_parsed_network(self
, network
):
899 Assistive function to detect and subsequently sort out parsed
900 networks from RIR data (both Whois and so-called "extended sources"),
903 (a) not globally routable (RFC 1918 space, et al.)
904 (b) covering a too large chunk of the IP address space (prefix length
905 is < 7 for IPv4 networks, and < 10 for IPv6)
906 (c) "0.0.0.0" or "::" as a network address
907 (d) are too small for being publicly announced (we have decided not to
908 process them at the moment, as they significantly enlarge our
909 database without providing very helpful additional information)
911 This unfortunately is necessary due to brain-dead clutter across
912 various RIR databases, causing mismatches and eventually disruptions.
914 We will return False in case a network is not suitable for adding
915 it to our database, and True otherwise.
918 if not network
or not (isinstance(network
, ipaddress
.IPv4Network
) or isinstance(network
, ipaddress
.IPv6Network
)):
921 if not network
.is_global
:
922 log
.debug("Skipping non-globally routable network: %s" % network
)
925 if network
.version
== 4:
926 if network
.prefixlen
< 7:
927 log
.debug("Skipping too big IP chunk: %s" % network
)
930 if network
.prefixlen
> 24:
931 log
.debug("Skipping network too small to be publicly announced: %s" % network
)
934 if str(network
.network_address
) == "0.0.0.0":
935 log
.debug("Skipping network based on 0.0.0.0: %s" % network
)
938 elif network
.version
== 6:
939 if network
.prefixlen
< 10:
940 log
.debug("Skipping too big IP chunk: %s" % network
)
943 if network
.prefixlen
> 48:
944 log
.debug("Skipping network too small to be publicly announced: %s" % network
)
947 if str(network
.network_address
) == "::":
948 log
.debug("Skipping network based on '::': %s" % network
)
952 # This should not happen...
953 log
.warning("Skipping network of unknown family, this should not happen: %s" % network
)
956 # In case we have made it here, the network is considered to
957 # be suitable for libloc consumption...
960 def _check_parsed_asn(self
, asn
):
962 Assistive function to filter Autonomous System Numbers not being suitable
963 for adding to our database. Returns False in such cases, and True otherwise.
966 for start
, end
in VALID_ASN_RANGES
:
967 if start
<= asn
and end
>= asn
:
970 log
.info("Supplied ASN %s out of publicly routable ASN ranges" % asn
)
973 def _parse_block(self
, block
, source_key
, countries
):
974 # Get first line to find out what type of block this is
978 if line
.startswith("aut-num:"):
979 return self
._parse
_autnum
_block
(block
, source_key
)
982 if line
.startswith("inet6num:") or line
.startswith("inetnum:"):
983 return self
._parse
_inetnum
_block
(block
, source_key
, countries
)
986 elif line
.startswith("organisation:"):
987 return self
._parse
_org
_block
(block
, source_key
)
989 def _parse_autnum_block(self
, block
, source_key
):
993 key
, val
= split_line(line
)
996 m
= re
.match(r
"^(AS|as)(\d+)", val
)
998 autnum
["asn"] = m
.group(2)
1001 autnum
[key
] = val
.upper()
1003 elif key
== "descr":
1004 # Save the first description line as well...
1005 if not key
in autnum
:
1008 # Skip empty objects
1009 if not autnum
or not "asn" in autnum
:
1012 # Insert a dummy organisation handle into our temporary organisations
1013 # table in case the AS does not have an organisation handle set, but
1014 # has a description (a quirk often observed in APNIC area), so we can
1015 # later display at least some string for this AS.
1016 if not "org" in autnum
:
1017 if "descr" in autnum
:
1018 autnum
["org"] = "LIBLOC-%s-ORGHANDLE" % autnum
.get("asn")
1020 self
.db
.execute("INSERT INTO _organizations(handle, name, source) \
1021 VALUES(%s, %s, %s) ON CONFLICT (handle) DO NOTHING",
1022 autnum
.get("org"), autnum
.get("descr"), source_key
,
1025 log
.warning("ASN %s neither has an organisation handle nor a description line set, omitting" % \
1029 # Insert into database
1030 self
.db
.execute("INSERT INTO _autnums(number, organization, source) \
1031 VALUES(%s, %s, %s) ON CONFLICT (number) DO UPDATE SET \
1032 organization = excluded.organization",
1033 autnum
.get("asn"), autnum
.get("org"), source_key
,
1036 def _parse_inetnum_block(self
, block
, source_key
, countries
):
1037 log
.debug("Parsing inetnum block:")
1044 key
, val
= split_line(line
)
1046 # Filter any inetnum records which are only referring to IP space
1047 # not managed by that specific RIR...
1048 if key
== "netname":
1049 if re
.match(r
"^(ERX-NETBLOCK|(AFRINIC|ARIN|LACNIC|RIPE)-CIDR-BLOCK|IANA-NETBLOCK-\d{1,3}|NON-RIPE-NCC-MANAGED-ADDRESS-BLOCK|STUB-[\d-]{3,}SLASH\d{1,2})", val
.strip()):
1050 log
.debug("Skipping record indicating historic/orphaned data: %s" % val
.strip())
1053 if key
== "inetnum":
1054 start_address
, delim
, end_address
= val
.partition("-")
1056 # Strip any excess space
1057 start_address
, end_address
= start_address
.rstrip(), end_address
.strip()
1059 # Handle "inetnum" formatting in LACNIC DB (e.g. "24.152.8/22" instead of "24.152.8.0/22")
1060 if start_address
and not (delim
or end_address
):
1062 start_address
= ipaddress
.ip_network(start_address
, strict
=False)
1064 start_address
= start_address
.split("/")
1065 ldigits
= start_address
[0].count(".")
1067 # How many octets do we need to add?
1068 # (LACNIC does not seem to have a /8 or greater assigned, so the following should suffice.)
1070 start_address
= start_address
[0] + ".0.0/" + start_address
[1]
1072 start_address
= start_address
[0] + ".0/" + start_address
[1]
1074 log
.warning("Could not recover IPv4 address from line in LACNIC DB format: %s" % line
)
1078 start_address
= ipaddress
.ip_network(start_address
, strict
=False)
1080 log
.warning("Could not parse line in LACNIC DB format: %s" % line
)
1083 # Enumerate first and last IP address of this network
1084 end_address
= start_address
[-1]
1085 start_address
= start_address
[0]
1088 # Convert to IP address
1090 start_address
= ipaddress
.ip_address(start_address
)
1091 end_address
= ipaddress
.ip_address(end_address
)
1093 log
.warning("Could not parse line: %s" % line
)
1096 inetnum
["inetnum"] = list(ipaddress
.summarize_address_range(start_address
, end_address
))
1098 elif key
== "inet6num":
1099 inetnum
[key
] = [ipaddress
.ip_network(val
, strict
=False)]
1101 elif key
== "country":
1104 # Ignore certain country codes
1105 if cc
in IGNORED_COUNTRIES
:
1106 log
.debug("Ignoring country code '%s'" % cc
)
1109 # Translate country codes
1111 cc
= TRANSLATED_COUNTRIES
[cc
]
1115 # Do we know this country?
1116 if not cc
in countries
:
1117 log
.warning("Skipping invalid country code '%s'" % cc
)
1121 inetnum
[key
].append(cc
)
1125 # Parse the geofeed attribute
1126 elif key
== "geofeed":
1127 inetnum
["geofeed"] = val
1129 # Parse geofeed when used as a remark
1130 elif key
== "remarks":
1131 m
= re
.match(r
"^(?:Geofeed)\s+(https://.*)", val
)
1133 inetnum
["geofeed"] = m
.group(1)
1135 # Skip empty objects
1139 # Iterate through all networks enumerated from above, check them for plausibility and insert
1140 # them into the database, if _check_parsed_network() succeeded
1141 for single_network
in inetnum
.get("inet6num") or inetnum
.get("inetnum"):
1142 if not self
._check
_parsed
_network
(single_network
):
1145 # Fetch the countries or use a list with an empty country
1146 countries
= inetnum
.get("country", [None])
1148 # Insert the network into the database but only use the first country code
1149 for cc
in countries
:
1163 ON CONFLICT (network)
1164 DO UPDATE SET country = excluded.country
1165 """, "%s" % single_network
, cc
, [cc
for cc
in countries
if cc
], source_key
,
1168 # If there are more than one country, we will only use the first one
1171 # Update any geofeed information
1172 geofeed
= inetnum
.get("geofeed", None)
1174 self
._parse
_geofeed
(geofeed
, single_network
)
1176 # Delete any previous geofeeds
1179 "DELETE FROM network_geofeeds WHERE network = %s", "%s" % single_network
,
1182 def _parse_geofeed(self
, url
, single_network
):
1184 url
= urllib
.parse
.urlparse(url
)
1186 # Make sure that this is a HTTPS URL
1187 if not url
.scheme
== "https":
1188 log
.debug("Geofeed URL is not using HTTPS: %s" % geofeed
)
1191 # Put the URL back together normalized
1194 # Store/update any geofeeds
1204 ON CONFLICT (network) DO
1205 UPDATE SET url = excluded.url""",
1206 "%s" % single_network
, url
,
1209 def _parse_org_block(self
, block
, source_key
):
1213 key
, val
= split_line(line
)
1215 if key
== "organisation":
1216 org
[key
] = val
.upper()
1217 elif key
== "org-name":
1220 # Skip empty objects
1224 self
.db
.execute("INSERT INTO _organizations(handle, name, source) \
1225 VALUES(%s, %s, %s) ON CONFLICT (handle) DO \
1226 UPDATE SET name = excluded.name",
1227 org
.get("organisation"), org
.get("org-name"), source_key
,
1230 def _parse_line(self
, line
, source_key
, validcountries
=None):
1232 if line
.startswith("2"):
1236 if line
.startswith("#"):
1240 registry
, country_code
, type, line
= line
.split("|", 3)
1242 log
.warning("Could not parse line: %s" % line
)
1245 # Skip any unknown protocols
1246 if not type in ("ipv6", "ipv4"):
1247 log
.warning("Unknown IP protocol '%s'" % type)
1250 # Skip any lines that are for stats only or do not have a country
1251 # code at all (avoids log spam below)
1252 if not country_code
or country_code
== '*':
1255 # Skip objects with unknown country codes
1256 if validcountries
and country_code
not in validcountries
:
1257 log
.warning("Skipping line with bogus country '%s': %s" % \
1258 (country_code
, line
))
1262 address
, prefix
, date
, status
, organization
= line
.split("|")
1266 # Try parsing the line without organization
1268 address
, prefix
, date
, status
= line
.split("|")
1270 log
.warning("Unhandled line format: %s" % line
)
1273 # Skip anything that isn't properly assigned
1274 if not status
in ("assigned", "allocated"):
1277 # Cast prefix into an integer
1279 prefix
= int(prefix
)
1281 log
.warning("Invalid prefix: %s" % prefix
)
1284 # Fix prefix length for IPv4
1286 prefix
= 32 - int(math
.log(prefix
, 2))
1288 # Try to parse the address
1290 network
= ipaddress
.ip_network("%s/%s" % (address
, prefix
), strict
=False)
1292 log
.warning("Invalid IP address: %s" % address
)
1295 if not self
._check
_parsed
_network
(network
):
1311 ON CONFLICT (network)
1312 DO UPDATE SET country = excluded.country
1313 """, "%s" % network
, country_code
, [country
], source_key
,
1316 def _import_as_names_from_arin(self
, downloader
):
1317 # Delete all previously imported content
1318 self
.db
.execute("DELETE FROM autnums WHERE source = %s", "ARIN")
1320 # Try to retrieve the feed from ftp.arin.net
1321 feed
= downloader
.request_lines("https://ftp.arin.net/pub/resource_registry_service/asns.csv")
1323 # Walk through the file
1324 for line
in csv
.DictReader(feed
, dialect
="arin"):
1325 log
.debug("Processing object: %s" % line
)
1328 status
= line
.get("Status")
1330 # We are only interested in anything managed by ARIN
1331 if not status
== "Full Registry Services":
1334 # Fetch organization name
1335 name
= line
.get("Org Name")
1338 first_asn
= line
.get("Start AS Number")
1339 last_asn
= line
.get("End AS Number")
1343 first_asn
= int(first_asn
)
1344 except TypeError as e
:
1345 log
.warning("Could not parse ASN '%s'" % first_asn
)
1349 last_asn
= int(last_asn
)
1350 except TypeError as e
:
1351 log
.warning("Could not parse ASN '%s'" % last_asn
)
1354 # Check if the range is valid
1355 if last_asn
< first_asn
:
1356 log
.warning("Invalid ASN range %s-%s" % (first_asn
, last_asn
))
1358 # Insert everything into the database
1359 for asn
in range(first_asn
, last_asn
+ 1):
1360 if not self
._check
_parsed
_asn
(asn
):
1361 log
.warning("Skipping invalid ASN %s" % asn
)
1381 """, asn
, name
, "ARIN",
1384 def handle_update_announcements(self
, ns
):
1385 server
= ns
.server
[0]
1387 with self
.db
.transaction():
1388 if server
.startswith("/"):
1389 self
._handle
_update
_announcements
_from
_bird
(server
)
1391 # Purge anything we never want here
1393 -- Delete default routes
1394 DELETE FROM announcements WHERE network = '::/0' OR network = '0.0.0.0/0';
1396 -- Delete anything that is not global unicast address space
1397 DELETE FROM announcements WHERE family(network) = 6 AND NOT network <<= '2000::/3';
1399 -- DELETE "current network" address space
1400 DELETE FROM announcements WHERE family(network) = 4 AND network <<= '0.0.0.0/8';
1402 -- DELETE local loopback address space
1403 DELETE FROM announcements WHERE family(network) = 4 AND network <<= '127.0.0.0/8';
1405 -- DELETE RFC 1918 address space
1406 DELETE FROM announcements WHERE family(network) = 4 AND network <<= '10.0.0.0/8';
1407 DELETE FROM announcements WHERE family(network) = 4 AND network <<= '172.16.0.0/12';
1408 DELETE FROM announcements WHERE family(network) = 4 AND network <<= '192.168.0.0/16';
1410 -- DELETE test, benchmark and documentation address space
1411 DELETE FROM announcements WHERE family(network) = 4 AND network <<= '192.0.0.0/24';
1412 DELETE FROM announcements WHERE family(network) = 4 AND network <<= '192.0.2.0/24';
1413 DELETE FROM announcements WHERE family(network) = 4 AND network <<= '198.18.0.0/15';
1414 DELETE FROM announcements WHERE family(network) = 4 AND network <<= '198.51.100.0/24';
1415 DELETE FROM announcements WHERE family(network) = 4 AND network <<= '203.0.113.0/24';
1417 -- DELETE CGNAT address space (RFC 6598)
1418 DELETE FROM announcements WHERE family(network) = 4 AND network <<= '100.64.0.0/10';
1420 -- DELETE link local address space
1421 DELETE FROM announcements WHERE family(network) = 4 AND network <<= '169.254.0.0/16';
1423 -- DELETE IPv6 to IPv4 (6to4) address space (RFC 3068)
1424 DELETE FROM announcements WHERE family(network) = 4 AND network <<= '192.88.99.0/24';
1425 DELETE FROM announcements WHERE family(network) = 6 AND network <<= '2002::/16';
1427 -- DELETE multicast and reserved address space
1428 DELETE FROM announcements WHERE family(network) = 4 AND network <<= '224.0.0.0/4';
1429 DELETE FROM announcements WHERE family(network) = 4 AND network <<= '240.0.0.0/4';
1431 -- Delete networks that are too small to be in the global routing table
1432 DELETE FROM announcements WHERE family(network) = 6 AND masklen(network) > 48;
1433 DELETE FROM announcements WHERE family(network) = 4 AND masklen(network) > 24;
1435 -- Delete any non-public or reserved ASNs
1436 DELETE FROM announcements WHERE NOT (
1437 (autnum >= 1 AND autnum <= 23455)
1439 (autnum >= 23457 AND autnum <= 64495)
1441 (autnum >= 131072 AND autnum <= 4199999999)
1444 -- Delete everything that we have not seen for 14 days
1445 DELETE FROM announcements WHERE last_seen_at <= CURRENT_TIMESTAMP - INTERVAL '14 days';
1448 def _handle_update_announcements_from_bird(self
, server
):
1449 # Pre-compile the regular expression for faster searching
1450 route
= re
.compile(b
"^\s(.+?)\s+.+?\[(?:AS(.*?))?.\]$")
1452 log
.info("Requesting routing table from Bird (%s)" % server
)
1454 aggregated_networks
= []
1456 # Send command to list all routes
1457 for line
in self
._bird
_cmd
(server
, "show route"):
1458 m
= route
.match(line
)
1464 # Ignore any header lines with the name of the routing table
1465 elif line
.startswith(b
"Table"):
1470 log
.debug("Could not parse line: %s" % line
.decode())
1474 # Fetch the extracted network and ASN
1475 network
, autnum
= m
.groups()
1477 # Decode into strings
1479 network
= network
.decode()
1481 autnum
= autnum
.decode()
1483 # Collect all aggregated networks
1485 log
.debug("%s is an aggregated network" % network
)
1486 aggregated_networks
.append(network
)
1489 # Insert it into the database
1490 self
.db
.execute("INSERT INTO announcements(network, autnum) \
1491 VALUES(%s, %s) ON CONFLICT (network) DO \
1492 UPDATE SET autnum = excluded.autnum, last_seen_at = CURRENT_TIMESTAMP",
1496 # Process any aggregated networks
1497 for network
in aggregated_networks
:
1498 log
.debug("Processing aggregated network %s" % network
)
1500 # Run "show route all" for each network
1501 for line
in self
._bird
_cmd
(server
, "show route %s all" % network
):
1502 # Try finding the path
1503 m
= re
.match(b
"\s+BGP\.as_path:.* (\d+) {\d+}$", line
)
1505 # Select the last AS number in the path
1506 autnum
= m
.group(1).decode()
1508 # Insert it into the database
1509 self
.db
.execute("INSERT INTO announcements(network, autnum) \
1510 VALUES(%s, %s) ON CONFLICT (network) DO \
1511 UPDATE SET autnum = excluded.autnum, last_seen_at = CURRENT_TIMESTAMP",
1515 # We don't need to process any more
# Send a single command to Bird's UNIX control socket at `socket_path` and
# process the response line by line.
# NOTE(review): several original lines are missing from this extract (the
# read loop header, the buffer initialisation and the yield/return path),
# so this presumably is a generator — confirm against upstream. The socket
# `s` is never closed in the visible lines — verify cleanup happens in the
# missing part.
1518 def _bird_cmd(self
, socket_path
, command
):
1519 # Connect to the socket
1520 s
= socket
.socket(socket
.AF_UNIX
, socket
.SOCK_STREAM
)
1521 s
.connect(socket_path
)
1523 # Allocate some buffer
1526 log
.debug("Sending Bird command: %s" % command
)
1529 s
.send(b
"%s\n" % command
.encode())
1532 # Fill up the buffer
1533 buffer += s
.recv(4096)
1536 # Search for the next newline
1537 pos
= buffer.find(b
"\n")
1539 # If we cannot find one, we go back and read more data
1543 # Cut after the newline character
1546 # Split the line we want and keep the rest in buffer
1547 line
, buffer = buffer[:pos
], buffer[pos
:]
1549 # Try parsing any status lines
# Bird status lines start with a 4-digit code followed by b" " (32) or
# b"-" (45); indexing a bytes object yields ints, hence the numeric check.
1550 if len(line
) > 4 and line
[:4].isdigit() and line
[4] in (32, 45):
1551 code
, delim
, line
= int(line
[:4]), line
[4], line
[5:]
1553 log
.debug("Received response code %s from bird" % code
)
1563 # Otherwise return the line
# Refresh all Geofeeds: prune unlinked feeds, fetch stale ones (older than
# one week) concurrently, and drop data from feeds that failed to update
# for two weeks.
# NOTE(review): the SQL statement bodies are largely missing from this
# extract — only fragments of the queries are visible.
1566 def handle_update_geofeeds(self
, ns
):
1568 with self
.db
.transaction():
1569 # Delete all geofeeds which are no longer linked
1580 geofeeds.url = network_geofeeds.url
1599 # Fetch all Geofeeds that require an update
1600 geofeeds
= self
.db
.query("""
1609 updated_at <= CURRENT_TIMESTAMP - INTERVAL '1 week'
1614 # Create a downloader
1615 downloader
= location
.importer
.Downloader()
1617 # Pass the downloader to the fetch_geofeed function
# functools.partial binds the shared downloader so executor.map can call
# _fetch_geofeed with one argument per geofeed row.
1618 fetch_geofeed
= functools
.partial(self
._fetch
_geofeed
, downloader
)
1620 with concurrent
.futures
.ThreadPoolExecutor(max_workers
=10) as executor
:
1621 results
= executor
.map(fetch_geofeed
, geofeeds
)
1623 # Fetch all results to raise any exceptions
# executor.map is lazy — iterating forces completion and re-raises any
# worker exception in this thread.
1624 for result
in results
:
1627 # Delete data from any feeds that did not update in the last two weeks
1628 with self
.db
.transaction():
1633 geofeed_networks.geofeed_id IN (
1641 updated_at <= CURRENT_TIMESTAMP - INTERVAL '2 weeks'
# Download one Geofeed (RFC 8805-style CSV), validate each entry, and
# replace its rows in `geofeed_networks`; on failure record the HTTP (or
# synthetic 599) status and error text on the `geofeeds` row.
# NOTE(review): lines are missing from this extract (the line-reading loop
# header, `continue` statements and the INSERT parameter list), so control
# flow between the visible fragments is partly inferred — confirm upstream.
1645 def _fetch_geofeed(self
, downloader
, geofeed
):
1646 log
.debug("Fetching Geofeed %s" % geofeed
.url
)
1648 with self
.db
.transaction():
1652 f
= downloader
.retrieve(geofeed
.url
, headers
={
1653 "User-Agent" : "location/%s" % location
.__version
__,
1655 # We expect some plain text file in CSV format
1656 "Accept" : "text/csv, text/plain",
1659 # Remove any previous data
1660 self
.db
.execute("DELETE FROM geofeed_networks \
1661 WHERE geofeed_id = %s", geofeed
.id)
1665 # Read the output line by line
1670 line
= line
.decode()
1672 # Ignore any lines we cannot decode
1673 except UnicodeDecodeError:
1674 log
.debug("Could not decode line %s in %s" \
1675 % (lineno
, geofeed
.url
))
1679 line
= line
.rstrip()
1685 # Try to parse the line
1687 fields
= line
.split(",", 5)
1689 log
.debug("Could not parse line: %s" % line
)
1692 # Check if we have enough fields
1694 log
.debug("Not enough fields in line: %s" % line
)
# Only the first four CSV columns are used; the trailing comma discards
# nothing here — fields[:4] already limits the unpacking.
1698 network
, country
, region
, city
, = fields
[:4]
1700 # Try to parse the network
1702 network
= ipaddress
.ip_network(network
, strict
=False)
1704 log
.debug("Could not parse network: %s" % network
)
1707 # Strip any excess whitespace from country codes
1708 country
= country
.strip()
1710 # Make the country code uppercase
1711 country
= country
.upper()
1713 # Check the country code
1715 log
.debug("Empty country code in Geofeed %s line %s" \
1716 % (geofeed
.url
, lineno
))
1719 elif not location
.country_code_is_valid(country
):
1720 log
.debug("Invalid country code in Geofeed %s:%s: %s" \
1721 % (geofeed
.url
, lineno
, country
))
1724 # Write this into the database
1734 VALUES (%s, %s, %s, %s, %s)""",
1742 # Catch any HTTP errors
1743 except urllib
.request
.HTTPError
as e
:
# Store the real HTTP status code plus the stringified error.
1744 self
.db
.execute("UPDATE geofeeds SET status = %s, error = %s \
1745 WHERE id = %s", e
.code
, "%s" % e
, geofeed
.id)
1747 # Remove any previous data when the feed has been deleted
1749 self
.db
.execute("DELETE FROM geofeed_networks \
1750 WHERE geofeed_id = %s", geofeed
.id)
1752 # Catch any other errors and connection timeouts
1753 except (http
.client
.InvalidURL
, urllib
.request
.URLError
, TimeoutError
) as e
:
1754 log
.debug("Could not fetch URL %s: %s" % (geofeed
.url
, e
))
# 599 is used as a synthetic "network connect error" status code.
1756 self
.db
.execute("UPDATE geofeeds SET status = %s, error = %s \
1757 WHERE id = %s", 599, "%s" % e
, geofeed
.id)
1759 # Mark the geofeed as updated
1765 updated_at = CURRENT_TIMESTAMP,
# Rebuild the override tables from scratch out of the RPSL-like files given
# on the command line (ns.files): "net" blocks go to network_overrides,
# "aut-num" blocks to autnum_overrides.
# NOTE(review): the INSERT statements are only partially visible in this
# extract (column lists and VALUES clauses have missing lines).
1773 def handle_update_overrides(self
, ns
):
1774 with self
.db
.transaction():
1775 # Drop any previous content
1776 self
.db
.execute("TRUNCATE TABLE autnum_overrides")
1777 self
.db
.execute("TRUNCATE TABLE network_overrides")
1779 for file in ns
.files
:
1780 log
.info("Reading %s..." % file)
1782 with
open(file, "rb") as f
:
1783 for type, block
in location
.importer
.read_blocks(f
):
1785 network
= block
.get("net")
1786 # Try to parse and normalise the network
# strict=False tolerates host bits being set in the input prefix.
1788 network
= ipaddress
.ip_network(network
, strict
=False)
1789 except ValueError as e
:
1790 log
.warning("Invalid IP network: %s: %s" % (network
, e
))
1793 # Prevent that we overwrite all networks
# A /0 override would match every address — refuse it.
1794 if network
.prefixlen
== 0:
1795 log
.warning("Skipping %s: You cannot overwrite default" % network
)
1805 is_satellite_provider,
1811 %s, %s, %s, %s, %s, %s
1813 ON CONFLICT (network) DO NOTHING
1816 block
.get("country"),
1817 self
._parse
_bool
(block
, "is-anonymous-proxy"),
1818 self
._parse
_bool
(block
, "is-satellite-provider"),
1819 self
._parse
_bool
(block
, "is-anycast"),
1820 self
._parse
_bool
(block
, "drop"),
1823 elif type == "aut-num":
1824 autnum
= block
.get("aut-num")
1826 # Check if AS number begins with "AS"
1827 if not autnum
.startswith("AS"):
1828 log
.warning("Invalid AS number: %s" % autnum
)
1842 is_satellite_provider,
1848 %s, %s, %s, %s, %s, %s, %s
1850 ON CONFLICT (number) DO NOTHING
1854 block
.get("country"),
1855 self
._parse
_bool
(block
, "is-anonymous-proxy"),
1856 self
._parse
_bool
(block
, "is-satellite-provider"),
1857 self
._parse
_bool
(block
, "is-anycast"),
1858 self
._parse
_bool
(block
, "drop"),
# Any other block type is not understood and only logged.
1862 log
.warning("Unsupported type: %s" % type)
1864 def handle_update_feeds(self
, ns
):
1866 Update any third-party feeds
# NOTE(review): the opening of the `feeds` list, the initialisation of
# `success`, and `continue`/`try` lines are missing from this extract.
1870 # Create a downloader
1871 downloader
= location
.importer
.Downloader()
1875 ("AWS-IP-RANGES", self
._import
_aws
_ip
_ranges
, "https://ip-ranges.amazonaws.com/ip-ranges.json"),
1878 ("SPAMHAUS-DROP", self
._import
_spamhaus
_drop
, "https://www.spamhaus.org/drop/drop.txt"),
1879 ("SPAMHAUS-EDROP", self
._import
_spamhaus
_drop
, "https://www.spamhaus.org/drop/edrop.txt"),
1880 ("SPAMHAUS-DROPV6", self
._import
_spamhaus
_drop
, "https://www.spamhaus.org/drop/dropv6.txt"),
1883 ("SPAMHAUS-ASNDROP", self
._import
_spamhaus
_asndrop
, "https://www.spamhaus.org/drop/asndrop.json"),
1886 # Drop any data from feeds that we don't support (any more)
1887 with self
.db
.transaction():
1888 # Fetch the names of all feeds we support
# Feed tuples are (name, callback, url, *extra); only the name matters here.
1889 sources
= [name
for name
, *rest
in feeds
]
1891 self
.db
.execute("DELETE FROM autnum_feeds WHERE NOT source = ANY(%s)", sources
)
1892 self
.db
.execute("DELETE FROM network_feeds WHERE NOT source = ANY(%s)", sources
)
1894 # Walk through all feeds
1895 for name
, callback
, url
, *args
in feeds
:
1896 # Skip any feeds that were not requested on the command line
1897 if ns
.feeds
and not name
in ns
.feeds
:
1901 self
._process
_feed
(downloader
, name
, callback
, url
, *args
)
1903 # Log an error but continue if an exception occurs
1904 except Exception as e
:
1905 log
.error("Error processing feed '%s': %s" % (name
, e
))
# Shell-style exit code: 0 on overall success, 1 otherwise.
1909 return 0 if success
else 1
# Download `url` and hand the response to `callback` after wiping any
# previous rows of this feed from both feed tables, all inside one
# database transaction so a failing callback rolls the delete back.
1911 def _process_feed(self
, downloader
, name
, callback
, url
, *args
):
1916 f
= downloader
.retrieve(url
)
1918 with self
.db
.transaction():
1919 # Drop any previous content
1920 self
.db
.execute("DELETE FROM autnum_feeds WHERE source = %s", name
)
1921 self
.db
.execute("DELETE FROM network_feeds WHERE source = %s", name
)
1923 # Call the callback to process the feed
1924 return callback(name
, f
, *args
)
1926 def _import_aws_ip_ranges(self
, name
, f
):
1930 # Set up a dictionary for mapping a region name to a country. Unfortunately,
1931 # there seems to be no machine-readable version available of this other than
1932 # https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-regions-availability-zones.html
1933 # (worse, it seems to be incomplete :-/ ); https://www.cloudping.cloud/endpoints
1934 # was helpful here as well.
1935 aws_region_country_map
= {
1937 "af-south-1" : "ZA",
1940 "il-central-1" : "IL", # Tel Aviv
1943 "ap-northeast-1" : "JP",
1944 "ap-northeast-2" : "KR",
1945 "ap-northeast-3" : "JP",
1947 "ap-south-1" : "IN",
1948 "ap-south-2" : "IN",
1949 "ap-southeast-1" : "SG",
1950 "ap-southeast-2" : "AU",
1951 "ap-southeast-3" : "MY",
1952 "ap-southeast-4" : "AU",
1953 "ap-southeast-5" : "NZ", # Auckland, NZ
1954 "ap-southeast-6" : "AP", # XXX: Precise location not documented anywhere
1957 "ca-central-1" : "CA",
1961 "eu-central-1" : "DE",
1962 "eu-central-2" : "CH",
1963 "eu-north-1" : "SE",
1967 "eu-south-1" : "IT",
1968 "eu-south-2" : "ES",
1971 "me-central-1" : "AE",
1972 "me-south-1" : "BH",
1977 # Undocumented, likely located in Berlin rather than Frankfurt
1978 "eusc-de-east-1" : "DE",
1981 # Collect a list of all networks
1982 prefixes
= feed
.get("ipv6_prefixes", []) + feed
.get("prefixes", [])
1984 for prefix
in prefixes
:
1986 network
= prefix
.get("ipv6_prefix") or prefix
.get("ip_prefix")
1990 network
= ipaddress
.ip_network(network
)
1991 except ValuleError
as e
:
1992 log
.warning("%s: Unable to parse prefix %s" % (name
, network
))
1995 # Sanitize parsed networks...
1996 if not self
._check
_parsed
_network
(network
):
2000 region
= prefix
.get("region")
2006 # Fetch the CC from the dictionary
2008 cc
= aws_region_country_map
[region
]
2010 # If we couldn't find anything, let's try something else...
2011 except KeyError as e
:
2012 # Find anycast networks
2013 if region
== "GLOBAL":
2016 # Everything that starts with us- is probably in the United States
2017 elif region
.startswith("us-"):
2020 # Everything that starts with cn- is probably China
2021 elif region
.startswith("cn-"):
2024 # Log a warning for anything else
2026 log
.warning("%s: Could not determine country code for AWS region %s" \
2044 ON CONFLICT (network, source) DO NOTHING
2045 """, "%s" % network
, name
, cc
, is_anycast
,
2048 def _import_spamhaus_drop(self
, name
, f
):
2050 Import Spamhaus DROP IP feeds
# NOTE(review): the loop header, `continue` lines, the strip/counter
# statements and the INSERT head are missing from this extract.
2055 # Walk through all lines
2058 line
= line
.decode("utf-8")
2060 # Strip off any comments
# Everything after ";" is a comment in the DROP text format.
2061 line
, _
, comment
= line
.partition(";")
2063 # Ignore empty lines
2067 # Strip any excess whitespace
2070 # Increment line counter
2075 network
= ipaddress
.ip_network(line
)
2076 except ValueError as e
:
2077 log
.warning("%s: Could not parse network: %s - %s" % (name
, line
, e
))
2081 if not self
._check
_parsed
_network
(network
):
2082 log
.warning("%s: Skipping bogus network: %s" % (name
, network
))
2085 # Insert into the database
2097 )""", "%s" % network
, name
, True,
2100 # Raise an exception if we could not import anything
# An empty feed is treated as a broken download rather than "no data".
2102 raise RuntimeError("Received bogus feed %s with no data" % name
)
2104 def _import_spamhaus_asndrop(self
, name
, f
):
2106 Import Spamhaus ASNDROP feed
# NOTE(review): the per-line loop header, `continue` lines and the INSERT
# head are missing from this extract. The feed is JSON-lines: one JSON
# object per line.
2110 line
= line
.decode("utf-8")
2114 line
= json
.loads(line
)
2115 except json
.JSONDecodeError
as e
:
2116 log
.warning("%s: Unable to parse JSON object %s: %s" % (name
, line
, e
))
# `line` is rebound to the parsed dict from here on.
2120 type = line
.get("type")
2123 if type == "metadata":
2127 asn
= line
.get("asn")
2129 # Skip any lines without an ASN
2133 # Filter invalid ASNs
2134 if not self
._check
_parsed
_asn
(asn
):
2135 log
.warning("%s: Skipping bogus ASN %s" % (name
, asn
))
2150 )""", "%s" % asn
, name
, True,
# Read `key` from an RPSL-style `block` dict and interpret it as a
# tri-state boolean: "yes"/"1" -> True, "no"/"0" -> False.
# NOTE(review): the return statements and the lowercase conversion line are
# missing from this extract; presumably a missing/unrecognised value yields
# None — confirm upstream. Also note there is no `self` parameter here.
2154 def _parse_bool(block
, key
):
2155 val
= block
.get(key
)
2157 # There is no point to proceed when we got None
2161 # Convert to lowercase
2165 if val
in ("yes", "1"):
2169 if val
in ("no", "0"):
# Replace the `countries` table with the contents of the whitespace-
# separated files given on the command line (ns.file): one
# "country_code continent_code name" entry per line, "#" lines ignored.
# NOTE(review): the per-file line loop header and `continue`/`try` lines
# are missing from this extract.
2175 def handle_import_countries(self
, ns
):
2176 with self
.db
.transaction():
2177 # Drop all data that we have
2178 self
.db
.execute("TRUNCATE TABLE countries")
2180 for file in ns
.file:
2182 line
= line
.rstrip()
2184 # Ignore any comments
2185 if line
.startswith("#"):
# maxsplit=2 keeps the country name intact even when it contains spaces.
2189 country_code
, continent_code
, name
= line
.split(maxsplit
=2)
2191 log
.warning("Could not parse line: %s" % line
)
2194 self
.db
.execute("INSERT INTO countries(country_code, name, continent_code) \
2195 VALUES(%s, %s, %s) ON CONFLICT DO NOTHING", country_code
, name
, continent_code
)
2198 def split_line(line
):
2199 key
, colon
, val
= line
.partition(":")
2201 # Strip any excess space
2208 # Run the command line interface