parser.add_argument("name", metavar="<name>", nargs="?",
help="Name of source")
parser.add_argument("url", metavar="<url>", nargs="?", help="Source URL")
- parser.add_argument("--header", metavar="<header>", help="HTTP Header")
+ parser.add_argument("--http-header", metavar="<http-header>",
+ help="Additional HTTP header to add to requests")
parser.set_defaults(func=add_source)
def add_source():
args = config.args()
- header = None
if args.name:
name = args.name
if url:
break
- if args.header:
- header = args.header
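+    # Only pass a header along when --http-header was given on the command
+    # line; None means no extra header is sent for this source.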
+ header = args.http_header if args.http_header else None
source_config = sources.SourceConfiguration(name, header=header, url=url)
sources.save_source_config(source_config)
"%s-%s" % (url_hash, self.url_basename(url)))
def fetch(self, url):
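+        # url may be a (url, http-header) tuple; keep the original value for
+        # the network layer and unwrap the plain URL for local bookkeeping.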
+ net_arg = url
+ url = url[0] if isinstance(url, tuple) else url
tmp_filename = self.get_tmp_filename(url)
if not config.args().force and os.path.exists(tmp_filename):
if not config.args().now and \
try:
tmp_fileobj = tempfile.NamedTemporaryFile()
suricata.update.net.get(
- url,
+ net_arg,
tmp_fileobj,
progress_hook=self.progress_hook)
shutil.copyfile(tmp_fileobj.name, tmp_filename)
fetched = self.fetch(url)
files.update(fetched)
except URLError as err:
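+            # Unwrap the (url, http-header) tuple so the error message only
+            # shows the URL itself.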
+ url = url[0] if isinstance(url, tuple) else url
logger.error("Failed to fetch %s: %s", url, err)
else:
for url in self.args.url:
params.update(internal_params)
if "url" in source:
# No need to go off to the index.
- url = source["url"] % params
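+            # Pair the resolved URL with the optional per-source
+            # "http-header" value.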
+ url = (source["url"] % params, source.get("http-header"))
+ logger.debug("Resolved source %s to URL %s.", name, url[0])
else:
if not index:
raise exceptions.ApplicationError(
"Source index is required for source %s; "
"run suricata-update update-sources" % (source["source"]))
url = index.resolve_url(name, params)
- logger.debug("Resolved source %s to URL %s.", name, url)
+ logger.debug("Resolved source %s to URL %s.", name, url)
urls.append(url)
if config.get("sources"):
import platform
import logging
import ssl
+import re
try:
# Python 3.3...
return "Suricata-Update/%s (%s)" % (
version, "; ".join(params))
+
+def is_header_clean(header):
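+    """Return True if header is a (name, value) pair whose stripped parts
+    contain only word characters and hyphens, False otherwise."""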
+ if len(header) != 2:
+ return False
+ name, val = header[0].strip(), header[1].strip()
+    if re.match(r"^[\w-]+$", name) and re.match(r"^[\w-]+$", val):
+ return True
+ return False
+
+
def get(url, fileobj, progress_hook=None):
""" Perform a GET request against a URL writing the contents into
    the provided file-like object.
if user_agent:
logger.debug("Setting HTTP User-Agent to %s", user_agent)
- opener.addheaders = [("User-Agent", user_agent),]
+ http_headers = [("User-Agent", user_agent)]
else:
- opener.addheaders = [(header, value) for header,
- value in opener.addheaders if header.lower() != "user-agent"]
- remote = opener.open(url)
- info = remote.info()
+        http_headers = [
+            (header, value) for header, value in opener.addheaders
+            if header.lower() != "user-agent"]
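+    # A (url, header) tuple carries an optional "Name: value" string; it is
+    # only added to the request headers if it passes is_header_clean().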
+ if isinstance(url, tuple):
+ header = url[1].split(":") if url[1] is not None else None
+ if header and is_header_clean(header=header):
+ name, val = header[0].strip(), header[1].strip()
+ logger.debug("Setting HTTP header %s to %s", name, val)
+ http_headers.append((name, val))
+ elif header:
+ logger.error("Header not set as it does not meet the criteria")
+ url = url[0]
+ opener.addheaders = http_headers
+
try:
- content_length = int(info["content-length"])
- except:
- content_length = 0
- bytes_read = 0
- while True:
- buf = remote.read(GET_BLOCK_SIZE)
- if not buf:
- # EOF
- break
- bytes_read += len(buf)
- fileobj.write(buf)
- if progress_hook:
- progress_hook(content_length, bytes_read)
- remote.close()
- fileobj.flush()
- return bytes_read, info
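+        # opener.open() can raise ValueError (e.g. for a malformed URL); it
+        # is caught below, logged, and the function then returns None.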
+ remote = opener.open(url)
+ except ValueError as ve:
+ logger.error(ve)
+ else:
+ info = remote.info()
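+        # Content-Length may be absent; treat it as 0 so the progress hook
+        # still receives a total.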
+ content_length = info.get("content-length")
+ content_length = int(content_length) if content_length else 0
+ bytes_read = 0
+ while True:
+ buf = remote.read(GET_BLOCK_SIZE)
+ if not buf:
+ # EOF
+ break
+ bytes_read += len(buf)
+ fileobj.write(buf)
+ if progress_hook:
+ progress_hook(content_length, bytes_read)
+ remote.close()
+ fileobj.flush()
+ return bytes_read, info
+
if __name__ == "__main__":