From: Jason Ish
Date: Tue, 29 Mar 2022 06:00:48 +0000 (-0600)
Subject: datasets: fix path handling issues
X-Git-Tag: 1.3.0rc1~27
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=858cb41f7b4a5442ce4ce65c56768c84633f6c69;p=thirdparty%2Fsuricata-update.git

datasets: fix path handling issues
---

diff --git a/suricata/update/main.py b/suricata/update/main.py
index c558991..f0e0c4a 100644
--- a/suricata/update/main.py
+++ b/suricata/update/main.py
@@ -424,26 +424,32 @@ def manage_classification(suriconf, files):
 def handle_dataset_files(rule, dep_files):
     if not rule.enabled:
         return
-    load_attr = [el.strip() for el in rule.dataset.split(",") if el.startswith("load")]
-    state_attr = [el.strip() for el in rule.dataset.split(",") if el.startswith("state")]
-    if not load_attr and not state_attr:
-        return
-    if load_attr and state_attr:
-        logger.error("Invalid dataset rule")
+
+    dataset_load = [el.strip() for el in rule.dataset.split(",") if el.startswith("load")]
+    if not dataset_load:
+        # No dataset load found.
         return
-    elif load_attr:
-        ds_attr = load_attr[0]
-    elif state_attr:
-        ds_attr = state_attr[0]
+    dataset_filename = dataset_load[0].split(maxsplit=1)[1].strip()
 
-    dataset_fname = os.path.basename(ds_attr.split(" ")[1])
-    filename = [fname for fname, content in dep_files.items() if fname == dataset_fname]
-    if filename:
-        logger.debug("Copying dataset file %s to output directory" % dataset_fname)
-        with open(os.path.join(config.get_output_dir(), dataset_fname), "w+") as fp:
-            fp.write(dep_files[dataset_fname].decode("utf-8"))
+    # Get the directory name the rule is from.
+    prefix = os.path.dirname(rule.group)
+
+    # Construct the source filename.
+    source_filename = "{}/{}".format(prefix, dataset_filename)
+
+    if source_filename in dep_files:
+        dest_filename = os.path.join(config.get_output_dir(), dataset_filename)
+        dest_dir = os.path.dirname(dest_filename)
+        logger.debug("Copying dataset file {} to {}".format(dataset_filename, dest_filename))
+        try:
+            os.makedirs(dest_dir, exist_ok=True)
+        except Exception as err:
+            logger.error("Failed to create directory {}: {}".format(dest_dir, err))
+            return
+        with open(dest_filename, "w") as fp:
+            fp.write(dep_files[source_filename].decode("utf-8"))
     else:
-        logger.error("Dataset file %s was not found" % dataset_fname)
+        logger.error("Dataset file '{}' was not found".format(dataset_filename))
 
 def handle_filehash_files(rule, dep_files, fhash):
     if not rule.enabled:
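
Editor's note (not part of the commit): a minimal standalone sketch of the path handling the new handle_dataset_files() performs. The rule group, dataset option value, and output directory below are hypothetical, chosen only to illustrate how the source and destination paths are derived; the parsing mirrors the added code.

    import os

    # Hypothetical inputs: a rule file shipped as "rules/example.rules" whose
    # dataset option loads "datasets/dns-seen.lst" (values assumed for illustration).
    rule_group = "rules/example.rules"
    dataset_option = "isset,dns-seen,type string,load datasets/dns-seen.lst"
    output_dir = "/var/lib/suricata/rules"  # assumed output directory

    # Pick out the "load ..." element and take everything after the keyword.
    load = [el.strip() for el in dataset_option.split(",") if el.startswith("load")]
    dataset_filename = load[0].split(maxsplit=1)[1].strip()    # "datasets/dns-seen.lst"

    # The source path is looked up relative to the directory the rule came from,
    # while the destination keeps the dataset's relative path under the output dir.
    prefix = os.path.dirname(rule_group)                        # "rules"
    source_filename = "{}/{}".format(prefix, dataset_filename)  # "rules/datasets/dns-seen.lst"
    dest_filename = os.path.join(output_dir, dataset_filename)
    os.makedirs(os.path.dirname(dest_filename), exist_ok=True)
    print(source_filename, "->", dest_filename)

Compared with the old os.path.basename() handling, keeping the relative path means a dataset referenced as datasets/dns-seen.lst lands in a matching subdirectory of the output directory instead of being flattened to its basename.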