src/python/location-downloader.in
#!/usr/bin/python3
###############################################################################
#                                                                             #
# libloc - A library to determine the location of someone on the Internet    #
#                                                                             #
# Copyright (C) 2019 IPFire Development Team <info@ipfire.org>                #
#                                                                             #
# This library is free software; you can redistribute it and/or              #
# modify it under the terms of the GNU Lesser General Public                 #
# License as published by the Free Software Foundation; either               #
# version 2.1 of the License, or (at your option) any later version.         #
#                                                                             #
# This library is distributed in the hope that it will be useful,            #
# but WITHOUT ANY WARRANTY; without even the implied warranty of             #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU           #
# Lesser General Public License for more details.                            #
#                                                                             #
###############################################################################

import argparse
import datetime
import logging
import lzma
import os
import random
import shutil
import sys
import tempfile
import time
import urllib.error
import urllib.parse
import urllib.request

# Load our location module
import location
from location.i18n import _

DATABASE_FILENAME = "test.db.xz"
MIRRORS = (
    "https://location.ipfire.org/databases/",
    "https://people.ipfire.org/~ms/location/",
)

# Initialise logging
log = logging.getLogger("location.downloader")
log.propagate = 1

class Downloader(object):
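    """
        Fetches the compressed location database from a list of mirrors
        (tried in random order), decompresses it on the fly and checks that
        the result is both recent and correctly signed.
    """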
    def __init__(self, mirrors):
        self.mirrors = list(mirrors)

        # Randomize mirrors
        random.shuffle(self.mirrors)

        # Get proxies from environment
        self.proxies = self._get_proxies()

    def _get_proxies(self):
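        """
            Reads proxy servers for HTTPS and HTTP from the https_proxy and
            http_proxy environment variables.
        """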
        proxies = {}

        for protocol in ("https", "http"):
            proxy = os.environ.get("%s_proxy" % protocol, None)

            if proxy:
                proxies[protocol] = proxy

        return proxies

    def _make_request(self, url, baseurl=None, headers=None):
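        """
            Builds a GET request for the given URL, optionally relative to a
            mirror base URL, and applies any headers and proxy settings.
        """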
        if baseurl:
            url = urllib.parse.urljoin(baseurl, url)

        req = urllib.request.Request(url, method="GET")

        # Copy the headers so that the caller's dictionary is not modified,
        # then add our own
        headers = dict(headers) if headers else {}
        headers.update({
            "User-Agent" : "location-downloader/@VERSION@",
        })

        # Set headers
        for header in headers:
            req.add_header(header, headers[header])

        # Set proxies
        for protocol in self.proxies:
            req.set_proxy(self.proxies[protocol], protocol)

        return req

    def _send_request(self, req, **kwargs):
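        """
            Sends the given request, logs the request and response headers
            and returns the response object.
        """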
        # Log request headers
        log.debug("HTTP %s Request to %s" % (req.method, req.host))
        log.debug("  URL: %s" % req.full_url)
        log.debug("  Headers:")
        for k, v in req.header_items():
            log.debug("    %s: %s" % (k, v))

        try:
            res = urllib.request.urlopen(req, **kwargs)

        except urllib.error.HTTPError as e:
            # Log response headers
            log.debug("HTTP Response: %s" % e.code)
            log.debug("  Headers:")
            for header in e.headers:
                log.debug("    %s: %s" % (header, e.headers[header]))

            # Raise all other errors
            raise e

        # Log response headers
        log.debug("HTTP Response: %s" % res.code)
        log.debug("  Headers:")
        for k, v in res.getheaders():
            log.debug("    %s: %s" % (k, v))

        return res

    def download(self, url, public_key, timestamp=None, **kwargs):
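        """
            Downloads the file at the given URL from one of the mirrors,
            decompressing it on the fly into a temporary file.

            If a timestamp is given, mirrors serving an older file are
            skipped. The downloaded database is checked for recency and a
            valid signature before the temporary file is returned. Raises
            FileNotFoundError if no mirror could deliver a usable file.
        """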
        headers = {}

        if timestamp:
            headers["If-Modified-Since"] = timestamp.strftime(
                "%a, %d %b %Y %H:%M:%S GMT",
            )

        t = tempfile.NamedTemporaryFile(delete=False)
        with t:
            # Try all mirrors
            for mirror in self.mirrors:
                # Prepare HTTP request
                req = self._make_request(url, baseurl=mirror, headers=headers)

                try:
                    with self._send_request(req) as res:
                        decompressor = lzma.LZMADecompressor()

                        # Read all data
                        while True:
                            buf = res.read(1024)
                            if not buf:
                                break

                            # Decompress data
                            buf = decompressor.decompress(buf)
                            if buf:
                                t.write(buf)

                    # Write all data to disk
                    t.flush()

                # Catch decompression errors
                except lzma.LZMAError as e:
                    log.warning("Could not decompress downloaded file: %s" % e)
                    continue

                except urllib.error.HTTPError as e:
                    # The file on the server was too old
                    if e.code == 304:
                        log.warning("%s is serving an outdated database. Trying next mirror..." % mirror)

                    # Log any other HTTP errors
                    else:
                        log.warning("%s reported: %s" % (mirror, e))

                    # Throw away any downloaded content and try again
                    t.truncate()

                else:
                    # Check if the downloaded database is recent
                    if not self._check_database(t, public_key, timestamp):
                        log.warning("Downloaded database is outdated. Trying next mirror...")

                        # Throw away the data and try again
                        t.truncate()
                        continue

                    # Return temporary file
                    return t

        raise FileNotFoundError(url)

    def _check_database(self, f, public_key, timestamp=None):
        """
            Checks whether the downloaded database can be opened, is recent
            enough and can be verified.
        """
        log.debug("Opening downloaded database at %s" % f.name)

        db = location.Database(f.name)

        # Database is not recent
        if timestamp and db.created_at < timestamp.timestamp():
            return False

        log.info("Downloaded new database from %s" % (time.strftime(
            "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(db.created_at),
        )))

        # Verify the database
        with open(public_key, "r") as f:
            if not db.verify(f):
                log.error("Could not verify database")
                return False

        return True


class CLI(object):
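    """
        Command line interface for downloading and verifying the location
        database.
    """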
    def __init__(self):
        self.downloader = Downloader(mirrors=MIRRORS)

    def parse_cli(self):
        parser = argparse.ArgumentParser(
            description=_("Location Downloader Command Line Interface"),
        )
        subparsers = parser.add_subparsers()

        # Global configuration flags
        parser.add_argument("--debug", action="store_true",
            help=_("Enable debug output"))
        parser.add_argument("--quiet", action="store_true",
            help=_("Enable quiet mode"))

        # version
        parser.add_argument("--version", action="version",
            version="%(prog)s @VERSION@")

        # database
        parser.add_argument("--database", "-d",
            default="@databasedir@/database.db", help=_("Path to database"),
        )

        # public key
        parser.add_argument("--public-key", "-k",
            default="@databasedir@/signing-key.pem", help=_("Public Signing Key"),
        )

        # Update
        update = subparsers.add_parser("update", help=_("Update database"))
        update.set_defaults(func=self.handle_update)

        # Verify
        verify = subparsers.add_parser("verify",
            help=_("Verify the downloaded database"))
        verify.set_defaults(func=self.handle_verify)

        args = parser.parse_args()

        # Configure logging
        if args.debug:
            location.logger.set_level(logging.DEBUG)
        elif args.quiet:
            location.logger.set_level(logging.WARNING)

        # Print usage if no action was given
        if "func" not in args:
            parser.print_usage()
            sys.exit(2)

        return args

    def run(self):
        # Parse command line arguments
        args = self.parse_cli()

        # Call function
        ret = args.func(args)

        # Return with exit code
        if ret:
            sys.exit(ret)

        # Otherwise just exit
        sys.exit(0)

    def handle_update(self, ns):
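        """
            Fetches the timestamp of the latest database version from DNS and
            downloads a new database if the local one is older.
        """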
        # Fetch the version we need from DNS
        t = location.discover_latest_version()

        # Parse timestamp into datetime format
        try:
            timestamp = datetime.datetime.fromtimestamp(t)
        except:
            raise

        # Open database
        try:
            db = location.Database(ns.database)

            # Check if we are already on the latest version
            if db.created_at >= timestamp.timestamp():
                log.info("Already on the latest version")
                return

        except FileNotFoundError as e:
            db = None

        # Try downloading a new database
        try:
            t = self.downloader.download(DATABASE_FILENAME,
                public_key=ns.public_key, timestamp=timestamp)

        # If no file could be downloaded, log a message
        except FileNotFoundError as e:
            log.error("Could not download a new database")
            return 1

        # If we have not received a new file, there is nothing to do
        if not t:
            return 3

        # Write temporary file to destination
        shutil.copyfile(t.name, ns.database)

        # Remove temporary file
        os.unlink(t.name)

        return 0

    def handle_verify(self, ns):
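        """
            Verifies the signature of the local database against the public
            key and returns a non-zero exit code on failure.
        """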
        try:
            db = location.Database(ns.database)
        except FileNotFoundError as e:
            log.error("%s: %s" % (ns.database, e))
            return 127

        # Verify the database
        with open(ns.public_key, "r") as f:
            if not db.verify(f):
                log.error("Could not verify database")
                return 1

        # Success
        log.debug("Database successfully verified")
        return 0


def main():
    # Run the command line interface
    c = CLI()
    c.run()

main()
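
# Example invocation (illustrative only; assumes the script is installed as
# "location-downloader" and that the @databasedir@ placeholder has been
# filled in at build time, the paths shown here are placeholders):
#
#   location-downloader update
#   location-downloader --database /tmp/location.db --public-key signing-key.pem update
#   location-downloader verify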