From: Michael Tremer Date: Wed, 7 Jan 2026 11:37:18 +0000 (+0000) Subject: ids-functions.pl: Implement extracting any data from tarballs X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=f0b43241a501f7c545e3cb15f6989e945c60b3e2;p=ipfire-2.x.git ids-functions.pl: Implement extracting any data from tarballs Suricata rulesets are distributed as tarballs. Besides the rules, those tarballs may contain additional data like datasets and so on. This data was not extracted before. For the IPFire DNSBL we are shipping any domains as a separate file which is being parsed by Suricata as a dataset. Obviously these files need to be extracted to be read by Suricata. This patch extracts any data files in the first place and later copies them into the rules directory. Signed-off-by: Michael Tremer --- diff --git a/config/cfgroot/ids-functions.pl b/config/cfgroot/ids-functions.pl index 14212930e..bede5fca0 100644 --- a/config/cfgroot/ids-functions.pl +++ b/config/cfgroot/ids-functions.pl @@ -22,6 +22,8 @@ ############################################################################ use strict; +use File::Copy; +use File::Spec; package IDS; @@ -391,7 +393,7 @@ sub extractruleset ($) { my $destination; # Splitt the packed file into chunks. - my $file = fileparse($packed_file); + my ($file, $path) = fileparse($packed_file); # Handle msg-id.map file. if ("$file" eq "sid-msg.map") { @@ -447,6 +449,13 @@ sub extractruleset ($) { # Set extract destination to temporaray rules_dir. $destination = "$tmp_rules_directory/$rulesfilename"; + + # Extract any datasets in the datasets/ sub-directory + } elsif ($path eq "datasets/") { + $destination = "$tmp_rules_directory/$path/$file"; + + # Ensure the directory exists + mkdir("$tmp_rules_directory/$path") unless (-d "$tmp_rules_directory/$path"); } else { # Skip all other files. return; @@ -514,6 +523,7 @@ sub process_ruleset(@) { # Array to store the extracted rulefile from the temporary rules directory. 
my @extracted_rulefiles; + my @extracted_datafiles; # Get names of the extracted raw rulefiles. opendir(DIR, $tmp_rules_directory) or die "Could not read from $tmp_rules_directory. $!\n"; @@ -522,7 +532,11 @@ next if $file =~ /^\.\.?$/; # Add file to the array of extracted files. - push(@extracted_rulefiles, $file); + if ($file =~ m/\.rules$/) { + push(@extracted_rulefiles, $file); + } else { + push(@extracted_datafiles, $file); + } } # Close directory handle. @@ -619,6 +633,32 @@ close(TMP_RULEFILE); } } + + # Copy all extracted data files + foreach my $datafile (@extracted_datafiles) { + my $src = File::Spec->catfile($tmp_rules_directory, $datafile); + my $dst = File::Spec->catfile($rulespath, $datafile); + + # If we found a directory, we will descend into it + if (-d $src) { + # Find all files that need to be copied + opendir(DIR, $src); + while (my $file = readdir(DIR)) { + next if ($file eq "." || $file eq ".."); + + push(@extracted_datafiles, "$datafile/$file"); + } + closedir(DIR); + + # Create the destination + mkdir($dst) unless (-d $dst); + + next; + } + + # Copy the content + File::Copy::copy($src, $dst) or die "Failed to copy datafile $src -> $dst: $!\n"; + } } #