#!/usr/bin/python3
###############################################################################
#                                                                             #
# libloc - A library to determine the location of someone on the Internet    #
#                                                                             #
# Copyright (C) 2019 IPFire Development Team <info@ipfire.org>                #
#                                                                             #
# This library is free software; you can redistribute it and/or               #
# modify it under the terms of the GNU Lesser General Public                  #
# License as published by the Free Software Foundation; either                #
# version 2.1 of the License, or (at your option) any later version.          #
#                                                                             #
# This library is distributed in the hope that it will be useful,             #
# but WITHOUT ANY WARRANTY; without even the implied warranty of              #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU            #
# Lesser General Public License for more details.                             #
#                                                                             #
###############################################################################

import argparse
import datetime
import logging
import lzma
import os
import random
import shutil
import sys
import tempfile
import time
import urllib.error
import urllib.parse
import urllib.request

# Load our location module
import location
from location.i18n import _

DATABASE_FILENAME = "test.db.xz"
MIRRORS = (
    "https://location.ipfire.org/databases/",
    "https://people.ipfire.org/~ms/location/",
)

# Initialise logging
log = logging.getLogger("location.downloader")
log.propagate = True

class Downloader(object):
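    """
    Downloads the location database from one of the configured mirrors,
    decompressing it on the fly and checking that the result can be
    verified against the public signing key
    """
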
    def __init__(self, mirrors):
        self.mirrors = list(mirrors)

        # Randomize mirrors
        random.shuffle(self.mirrors)

        # Get proxies from environment
        self.proxies = self._get_proxies()

    def _get_proxies(self):
        proxies = {}

        for protocol in ("https", "http"):
            proxy = os.environ.get("%s_proxy" % protocol, None)

            if proxy:
                proxies[protocol] = proxy

        return proxies

    def _make_request(self, url, baseurl=None, headers=None):
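        """
        Builds a GET request for the given URL (optionally joined onto a
        mirror base URL) with the given headers and any configured proxies
        """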
        if baseurl:
            url = urllib.parse.urljoin(baseurl, url)

        req = urllib.request.Request(url, method="GET")

        # Copy the headers so that the caller's dictionary is never modified
        headers = dict(headers) if headers else {}

        # Update headers
        headers.update({
            "User-Agent" : "location-downloader/@VERSION@",
        })

        # Set headers
        for header in headers:
            req.add_header(header, headers[header])

        # Set proxies
        for protocol in self.proxies:
            req.set_proxy(self.proxies[protocol], protocol)

        return req

    def _send_request(self, req, **kwargs):
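        """
        Sends the prepared request and logs request and response headers
        at debug level; HTTP errors are logged and re-raised
        """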
        # Log request headers
        log.debug("HTTP %s Request to %s" % (req.method, req.host))
        log.debug(" URL: %s" % req.full_url)
        log.debug(" Headers:")
        for k, v in req.header_items():
            log.debug(" %s: %s" % (k, v))

        try:
            res = urllib.request.urlopen(req, **kwargs)

        except urllib.error.HTTPError as e:
            # Log response headers
            log.debug("HTTP Response: %s" % e.code)
            log.debug(" Headers:")
            for header in e.headers:
                log.debug(" %s: %s" % (header, e.headers[header]))

            # Re-raise the error after logging it
            raise

        # Log response headers
        log.debug("HTTP Response: %s" % res.code)
        log.debug(" Headers:")
        for k, v in res.getheaders():
            log.debug(" %s: %s" % (k, v))

        return res

    def download(self, url, public_key, timestamp=None, **kwargs):
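        """
        Tries all mirrors in random order until one of them serves a database
        that can be decompressed, verified and is at least as recent as the
        given timestamp. Returns a temporary file holding the decompressed
        database, or raises FileNotFoundError if no mirror could deliver one
        """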
        headers = {}

        if timestamp:
            headers["If-Modified-Since"] = timestamp.strftime(
                "%a, %d %b %Y %H:%M:%S GMT",
            )

        t = tempfile.NamedTemporaryFile(delete=False)
        with t:
            # Try all mirrors
            for mirror in self.mirrors:
                # Prepare HTTP request
                req = self._make_request(url, baseurl=mirror, headers=headers)

                try:
                    with self._send_request(req) as res:
                        decompressor = lzma.LZMADecompressor()

                        # Read all data
                        while True:
                            buf = res.read(1024)
                            if not buf:
                                break

                            # Decompress data
                            buf = decompressor.decompress(buf)
                            if buf:
                                t.write(buf)

                    # Write all data to disk
                    t.flush()

                # Catch decompression errors
                except lzma.LZMAError as e:
                    log.warning("Could not decompress downloaded file: %s" % e)

                    # Throw away any partially written data and try the next mirror
                    t.seek(0)
                    t.truncate()
                    continue

                except urllib.error.HTTPError as e:
                    # The mirror has nothing newer than what we already have
                    if e.code == 304:
                        log.warning("%s is serving an outdated database. Trying next mirror..." % mirror)

                    # Log any other HTTP errors
                    else:
                        log.warning("%s reported: %s" % (mirror, e))

                    # Throw away any downloaded content and try again
                    t.seek(0)
                    t.truncate()

                else:
                    # Check if the downloaded database is recent
                    if not self._check_database(t, public_key, timestamp):
                        log.warning("Downloaded database is outdated. Trying next mirror...")

                        # Throw away the data and try again
                        t.seek(0)
                        t.truncate()
                        continue

                    # Return temporary file
                    return t

            raise FileNotFoundError(url)

    def _check_database(self, f, public_key, timestamp=None):
        """
        Checks whether the downloaded database can be opened and verified,
        and whether it is recent enough
        """
        log.debug("Opening downloaded database at %s" % f.name)

        db = location.Database(f.name)

        # Database is not recent
        if timestamp and db.created_at < timestamp.timestamp():
            return False

        log.info("Downloaded new database from %s" % (time.strftime(
            "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(db.created_at),
        )))

        # Verify the database
        with open(public_key, "r") as keyfile:
            if not db.verify(keyfile):
                log.error("Could not verify database")
                return False

        return True


class CLI(object):
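    """
    Command line interface that updates and verifies the local
    location database using the Downloader
    """
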
    def __init__(self):
        self.downloader = Downloader(mirrors=MIRRORS)

    def parse_cli(self):
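        """
        Parses the command line arguments and returns the resulting
        namespace; prints the usage and exits if no command was given
        """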
        parser = argparse.ArgumentParser(
            description=_("Location Downloader Command Line Interface"),
        )
        subparsers = parser.add_subparsers()

        # Global configuration flags
        parser.add_argument("--debug", action="store_true",
            help=_("Enable debug output"))

        # version
        parser.add_argument("--version", action="version",
            version="%(prog)s @VERSION@")

        # database
        parser.add_argument("--database", "-d",
            default="@databasedir@/database.db", help=_("Path to database"),
        )

        # public key
        parser.add_argument("--public-key", "-k",
            default="@databasedir@/signing-key.pem", help=_("Public Signing Key"),
        )

        # Update
        update = subparsers.add_parser("update", help=_("Update database"))
        update.set_defaults(func=self.handle_update)

        # Verify
        verify = subparsers.add_parser("verify",
            help=_("Verify the downloaded database"))
        verify.set_defaults(func=self.handle_verify)

        args = parser.parse_args()

        # Enable debug logging
        if args.debug:
            location.logger.set_level(logging.DEBUG)

        # Print usage if no action was given
        if "func" not in args:
            parser.print_usage()
            sys.exit(2)

        return args

    def run(self):
        # Parse command line arguments
        args = self.parse_cli()

        # Call function
        ret = args.func(args)

        # Return with exit code
        if ret:
            sys.exit(ret)

        # Otherwise just exit
        sys.exit(0)

    def handle_update(self, ns):
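        """
        Fetches the timestamp of the latest database version from DNS and,
        if the local database is older, downloads and installs a new copy
        """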
        # Fetch the version we need from DNS
        t = location.discover_latest_version()

        # Parse timestamp into datetime format
        timestamp = datetime.datetime.fromtimestamp(t)

        # Open database
        try:
            db = location.Database(ns.database)

            # Check if we are already on the latest version
            if db.created_at >= timestamp.timestamp():
                log.info("Already on the latest version")
                return

        except FileNotFoundError:
            db = None

        # Try downloading a new database
        try:
            t = self.downloader.download(DATABASE_FILENAME,
                public_key=ns.public_key, timestamp=timestamp)

        # If no file could be downloaded, log a message
        except FileNotFoundError:
            log.error("Could not download a new database")
            return 1

        # If we have not received a new file, there is nothing to do
        if not t:
            return 3

        # Write temporary file to destination
        shutil.copyfile(t.name, ns.database)

        # Remove temporary file
        os.unlink(t.name)

        return 0

    def handle_verify(self, ns):
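        """
        Verifies the local database against the public signing key and
        returns a non-zero exit code if that fails
        """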
        try:
            db = location.Database(ns.database)
        except FileNotFoundError as e:
            log.error("%s: %s" % (ns.database, e))
            return 127

        # Verify the database
        with open(ns.public_key, "r") as f:
            if not db.verify(f):
                log.error("Could not verify database")
                return 1

        # Success
        log.debug("Database successfully verified")
        return 0


def main():
    # Run the command line interface
    c = CLI()
    c.run()

main()