logger = logging.getLogger()
+
def register(parser):
parser.add_argument("name", metavar="<name>", nargs="?",
help="Name of source")
parser.add_argument("url", metavar="<url>", nargs="?", help="Source URL")
parser.add_argument("--http-header", metavar="<http-header>",
help="Additional HTTP header to add to requests")
+ parser.add_argument("--no-checksum", action="store_false",
+ help="Skips downloading the checksum URL")
parser.set_defaults(func=add_source)
+
def add_source():
args = config.args()
if url:
break
+ checksum = args.no_checksum
+
header = args.http_header if args.http_header else None
- source_config = sources.SourceConfiguration(name, header=header, url=url)
+ source_config = sources.SourceConfiguration(
+ name, header=header, url=url, checksum=checksum)
sources.save_source_config(source_config)
def fetch(self, url):
net_arg = url
- url = url[0] if isinstance(url, tuple) else url
+ checksum = url[2]
+ url = url[0]
tmp_filename = self.get_tmp_filename(url)
if config.args().offline:
if config.args().force:
"Last download less than 15 minutes ago. Not downloading %s.",
url)
return self.extract_files(tmp_filename)
- if self.check_checksum(tmp_filename, url):
- logger.info("Remote checksum has not changed. Not fetching.")
- return self.extract_files(tmp_filename)
+ if checksum:
+ if self.check_checksum(tmp_filename, url):
+ logger.info("Remote checksum has not changed. "
+ "Not fetching.")
+ return self.extract_files(tmp_filename)
if not os.path.exists(config.get_cache_dir()):
os.makedirs(config.get_cache_dir(), mode=0o770)
logger.info("Fetching %s." % (url))
urls = []
+ http_header = None
+ checksum = True
+
# Add any URLs added with the --url command line parameter.
if config.args().url:
for url in config.args().url:
- urls.append(url)
+ urls.append((url, http_header, checksum))
# Get the new style sources.
enabled_sources = sources.get_enabled_sources()
params.update(internal_params)
if "url" in source:
# No need to go off to the index.
- url = (source["url"] % params, source.get("http-header"))
+ http_header = source.get("http-header")
+ checksum = source.get("checksum", True)
+ url = (source["url"] % params, http_header, checksum)
logger.debug("Resolved source %s to URL %s.", name, url[0])
else:
if not index:
raise exceptions.ApplicationError(
"Source index is required for source %s; "
"run suricata-update update-sources" % (source["source"]))
- url = index.resolve_url(name, params)
+ source_config = index.get_source_by_name(name)
+ try:
+ checksum = source_config["checksum"]
+ except (KeyError, TypeError):
+ checksum = True
+ url = (index.resolve_url(name, params), http_header,
+ checksum)
- logger.debug("Resolved source %s to URL %s.", name, url)
+ logger.debug("Resolved source %s to URL %s.", name, url[0])
urls.append(url)
if type(url) not in [type("")]:
raise exceptions.InvalidConfigurationError(
"Invalid datatype for source URL: %s" % (str(url)))
- url = url % internal_params
+ url = (url % internal_params, http_header, checksum)
logger.debug("Adding source %s.", url)
urls.append(url)
if config.get("etopen") or not urls:
if not config.args().offline and not urls:
logger.info("No sources configured, will use Emerging Threats Open")
- urls.append(sources.get_etopen_url(internal_params))
+ urls.append((sources.get_etopen_url(internal_params), http_header,
+ checksum))
# Converting the URLs to a set removed dupes.
urls = set(urls)
class SourceConfiguration:
- def __init__(self, name, header=None, url=None, params={}):
+ def __init__(self, name, header=None, url=None,
+ params={}, checksum=True):
self.name = name
self.url = url
self.params = params
self.header = header
+ self.checksum = checksum
def dict(self):
d = {
d["params"] = self.params
if self.header:
d["http-header"] = self.header
+ if not self.checksum:
+ d["checksum"] = self.checksum
return d
class Index: