From: Jason Ish
Date: Thu, 14 Dec 2017 21:42:23 +0000 (-0600)
Subject: on download failure, return cached version (if exists)
X-Git-Tag: 1.0.0b1~3
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=05399103bd888c70a0d1f8e3fa4dc93d4d7d7e84;p=thirdparty%2Fsuricata-update.git

on download failure, return cached version (if exists)

Previously, if a download failed, an error would be logged and the rules
that did download were still processed. This could lead to output that
was not expected (missing rules).

Now, if the download fails, check whether we have the previous download.
If we do, log a warning and return the cached files. If we don't have the
previous download, log an error and process what rules we do have.
---

diff --git a/suricata/update/main.py b/suricata/update/main.py
index 5f5b08a..f04d77e 100644
--- a/suricata/update/main.py
+++ b/suricata/update/main.py
@@ -31,6 +31,7 @@ import types
 import shutil
 import glob
 import io
+import tempfile
 
 try:
     # Python 3.
@@ -351,14 +352,22 @@ class Fetch:
             os.makedirs(config.get_cache_dir(), mode=0o770)
         logger.info("Fetching %s." % (url))
         try:
+            tmp_fileobj = tempfile.NamedTemporaryFile()
             suricata.update.net.get(
                 url,
-                open(tmp_filename, "wb"),
+                tmp_fileobj,
                 progress_hook=self.progress_hook)
-        except:
+            shutil.copyfile(tmp_fileobj.name, tmp_filename)
+            tmp_fileobj.close()
+        except URLError as err:
             if os.path.exists(tmp_filename):
-                os.unlink(tmp_filename)
-            raise
+                logger.warning(
+                    "Failed to fetch %s, "
+                    "will use latest cached version: %s", url, err)
+                return self.extract_files(tmp_filename)
+            raise err
+        except Exception as err:
+            raise err
         if not config.args().quiet:
             self.progress_hook_finish()
         logger.info("Done.")
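
The pattern in the second hunk -- download into a temporary file, only replace
the cached copy on success, and fall back to the existing cached copy on
failure -- can be sketched outside the suricata-update code base. The sketch
below is illustrative only: it downloads with the standard library
(urllib.request, tempfile, shutil) rather than suricata.update.net.get, and
the names fetch_with_cache_fallback, cache_dir and filename are hypothetical,
not part of the project.

    # Minimal sketch of the cache-fallback pattern this commit adds.
    # Illustrative only: names are hypothetical and the download uses
    # urllib rather than suricata.update.net.get.

    import logging
    import os
    import shutil
    import tempfile
    from urllib.error import URLError
    from urllib.request import urlopen

    logger = logging.getLogger(__name__)

    def fetch_with_cache_fallback(url, cache_dir, filename):
        """Download url into cache_dir/filename; on failure, fall back
        to the cached copy if one exists, otherwise re-raise."""
        cached_path = os.path.join(cache_dir, filename)
        try:
            # Download into a temporary file first so a failed or partial
            # download can never clobber a good cached copy.
            with tempfile.NamedTemporaryFile() as tmp:
                with urlopen(url) as response:
                    shutil.copyfileobj(response, tmp)
                tmp.flush()
                shutil.copyfile(tmp.name, cached_path)
        except URLError as err:
            if os.path.exists(cached_path):
                logger.warning(
                    "Failed to fetch %s, will use latest cached version: %s",
                    url, err)
                return cached_path
            # No cached copy to fall back to; let the caller handle it.
            raise
        return cached_path

The key design choice, mirrored in the patch above, is that the temporary
file is only copied over the cached file after the download completes, which
is why the old os.unlink(tmp_filename) cleanup is no longer needed.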