Move dataset and filemd5 files along with rules
author Shivani Bhardwaj <shivanib134@gmail.com>
Mon, 23 Mar 2020 21:40:51 +0000 (03:10 +0530)
committer Jason Ish <jason.ish@oisf.net>
Sun, 2 Aug 2020 05:11:46 +0000 (23:11 -0600)
Closes Redmine tickets 2688 and 3528.
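
Background for the change: several Suricata rule keywords point at companion files that must sit next to the rule file for the engine to load them. Hypothetical examples (rule text and filenames made up for illustration):

    alert dns any any -> any any (msg:"Blocklisted domain"; dns.query; dataset:isset,bad-domains,type string,load bad-domains.lst; sid:1000001; rev:1;)
    alert http any any -> any any (msg:"Known bad MD5"; filemd5:bad-md5s.txt; sid:1000002; rev:1;)

Previously suricata-update wrote out the rules but not the files they reference, which is what this commit addresses.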

suricata/update/main.py

index 746bc1b23dd666a3a1917f30c56983c449d482b6..147007f4a147a883fe185b026011859f90b80489 100644
@@ -31,6 +31,7 @@ import glob
 import io
 import tempfile
 import signal
+import errno
 
 try:
     # Python 3.
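
The new errno import backs the Python 2-compatible "mkdir -p" idiom used below in handle_filehash_files. A minimal sketch of that pattern (makedirs_p is a hypothetical helper, not from the commit):

    import errno
    import os

    def makedirs_p(path):
        # Create path recursively; ignore "already exists", re-raise anything else.
        # (os.makedirs grew exist_ok=True in Python 3.2, but this code still
        # supported Python 2, as the import fallback above shows.)
        try:
            os.makedirs(path)
        except OSError as err:
            if err.errno != errno.EEXIST:
                raise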
@@ -92,6 +93,9 @@ DEFAULT_OUTPUT_RULE_FILENAME = "suricata.rules"
 
 INDEX_EXPIRATION_TIME = 60 * 60 * 24 * 14
 
+# Rule keywords that come with files
+file_kw = ["filemd5", "filesha1", "filesha256", "dataset"]
+
 class Fetch:
 
     def __init__(self):
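
file_kw drives the new dispatch in write_merged and write_to_directory below. A hedged sketch of the scan, using a plain dict as a stand-in for a parsed Rule object (Rule is dict-like in suricata.update.rule; the values here are made up):

    file_kw = ["filemd5", "filesha1", "filesha256", "dataset"]

    # Made-up stand-in for a parsed rule; real rules come from rule_mod.parse_fileobj().
    rule = {"sid": 1000001, "dataset": "isset,bad-domains,type string,load bad-domains.lst"}

    for kw in file_kw:
        if kw in rule:  # membership test over the rule's parsed options
            print("rule %s depends on a %s file" % (rule["sid"], kw))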
@@ -247,7 +251,6 @@ def load_filters(filename):
     return filters
 
 def load_drop_filters(filename):
-    
     matchers = load_matchers(filename)
     filters = []
 
@@ -414,7 +417,41 @@ def manage_classification(suriconf, files):
     except (OSError, IOError) as err:
         logger.error(err)
 
-def write_merged(filename, rulemap):
+def handle_dataset_files(rule, dep_files):
+    if not rule.enabled:
+        return
+    load_attr = [el for el in rule.dataset.split(",") if "load" in el][0]
+    dataset_fname = os.path.basename(load_attr.split(" ")[1])
+    filename = [fname for fname, content in dep_files.items() if fname == dataset_fname]
+    if filename:
+        logger.debug("Copying dataset file %s to output directory" % dataset_fname)
+        with open(os.path.join(config.get_output_dir(), dataset_fname), "w+") as fp:
+            fp.write(dep_files[dataset_fname].decode("utf-8"))
+    else:
+        logger.error("Dataset file %s was not found" % dataset_fname)
+
+def handle_filehash_files(rule, dep_files, fhash):
+    if not rule.enabled:
+        return
+    filehash_fname = rule.get(fhash)
+    filename = [fname for fname, content in dep_files.items() if os.path.join(*(fname.split(os.path.sep)[1:])) == filehash_fname]
+    if filename:
+        logger.debug("Copying %s file %s to output directory" % (fhash, filehash_fname))
+        filepath = os.path.join(config.get_state_dir(), os.path.dirname(filename[0]))
+        logger.debug("filepath: %s" % filepath)
+        try:
+            os.makedirs(filepath)
+        except OSError as oserr:
+            if oserr.errno != errno.EEXIST:
+                logger.error(oserr)
+                sys.exit(1)
+        logger.debug("output fname: %s" % os.path.join(filepath, os.path.basename(filehash_fname)))
+        with open(os.path.join(filepath, os.path.basename(filehash_fname)), "w+") as fp:
+            fp.write(dep_files[os.path.join("rules", filehash_fname)].decode("utf-8"))
+    else:
+        logger.error("%s file %s was not found" % (fhash, filehash_fname))
+
+def write_merged(filename, rulemap, dep_files):
 
     if not args.quiet:
         # List of rule IDs that have been added.
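
Two parsing details in the handlers above are easy to miss: handle_dataset_files pulls the filename out of the rule's dataset option, and handle_filehash_files compares archive paths with their leading directory (the "rules" component) stripped. A sketch of both steps with made-up values:

    import os

    # Dataset option: grab the "load ..." attribute, then its filename.
    dataset = "isset,bad-domains,type string,load datasets/bad-domains.lst"
    load_attr = [el for el in dataset.split(",") if "load" in el][0]  # "load datasets/bad-domains.lst"
    dataset_fname = os.path.basename(load_attr.split(" ")[1])        # "bad-domains.lst"

    # File-hash lookup: keys in dep_files carry the archive's top-level
    # directory, so drop the first path component before comparing.
    fname = os.path.join("rules", "hashes", "bad-md5s.txt")
    rel = os.path.join(*(fname.split(os.path.sep)[1:]))              # "hashes/bad-md5s.txt"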
@@ -432,6 +469,7 @@ def write_merged(filename, rulemap):
                     removed.append(rule)
                 elif rule.format() != rulemap[rule.id].format():
                     modified.append(rulemap[rule.id])
+
         for key in rulemap:
             if not key in oldset:
                 added.append(key)
@@ -445,12 +483,19 @@ def write_merged(filename, rulemap):
                         len(added),
                         len(removed),
                         len(modified)))
-    
     with io.open(filename, encoding="utf-8", mode="w") as fileobj:
-        for rule in rulemap:
-            print(rulemap[rule].format(), file=fileobj)
-
-def write_to_directory(directory, files, rulemap):
+        for sid in rulemap:
+            rule = rulemap[sid]
+            for kw in file_kw:
+                if kw in rule:
+                    if "dataset" == kw:
+                        handle_dataset_files(rule, dep_files)
+                    else:
+                        handle_filehash_files(rule, dep_files, kw)
+
+            print(rule.format(), file=fileobj)
+
+def write_to_directory(directory, files, rulemap, dep_files):
     # List of rule IDs that have been added.
     added = []
     # List of rule objects that have been removed.
@@ -498,6 +543,12 @@ def write_to_directory(directory, files, rulemap):
                 if not rule:
                     content.append(line.strip())
                 else:
+                    for kw in file_kw:
+                        if kw in rule:
+                            if "dataset" == kw:
+                                handle_dataset_files(rule, dep_files)
+                            else:
+                                handle_filehash_files(rule, dep_files, kw)
                     content.append(rulemap[rule.id].format())
             io.open(outpath, encoding="utf-8", mode="w").write(
                 u"\n".join(content))
@@ -1085,11 +1136,13 @@ def _main():
 
     rules = []
     classification_files = []
+    dep_files = {}
     for filename in sorted(files):
         if "classification.config" in filename:
             classification_files.append((filename, files[filename]))
             continue
         if not filename.endswith(".rules"):
+            dep_files.update({filename: files[filename]})
             continue
         logger.debug("Parsing %s." % (filename))
         rules += rule_mod.parse_fileobj(io.BytesIO(files[filename]), filename)
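
The dep_files mapping collected here is every downloaded file that is neither a rule file nor classification.config; it is passed through to the writers so the handlers above can locate the companion files. A condensed sketch of the same filtering over a made-up files mapping:

    files = {
        "rules/emerging-all.rules": b"...",           # parsed as rules
        "classification.config": b"...",              # handled separately
        "rules/bad-domains.lst": b"example.com\n",    # kept in dep_files
    }
    dep_files = {name: content for name, content in files.items()
                 if not name.endswith(".rules")
                 and "classification.config" not in name}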
@@ -1169,13 +1222,13 @@ def _main():
         output_filename = os.path.join(
             config.get_output_dir(), DEFAULT_OUTPUT_RULE_FILENAME)
         file_tracker.add(output_filename)
-        write_merged(os.path.join(output_filename), rulemap)
+        write_merged(os.path.join(output_filename), rulemap, dep_files)
     else:
         for filename in files:
             file_tracker.add(
                 os.path.join(
                     config.get_output_dir(), os.path.basename(filename)))
-        write_to_directory(config.get_output_dir(), files, rulemap)
+        write_to_directory(config.get_output_dir(), files, rulemap, dep_files)
 
     manage_classification(suriconf, classification_files)