X-Git-Url: http://git.ipfire.org/?a=blobdiff_plain;f=config%2Fcfgroot%2Fids-functions.pl;h=b81c63b6750a67a69db169cc94acc1f25866e1fa;hb=990d111d70b7f5276b5ff3b6729773f1066fcee7;hp=ed9bb203bae432e2ed27f037e3c9a2594af94f7a;hpb=50f348f681102eae5dc6d26f19292389397e77fb;p=people%2Fpmueller%2Fipfire-2.x.git diff --git a/config/cfgroot/ids-functions.pl b/config/cfgroot/ids-functions.pl index ed9bb203ba..b81c63b675 100644 --- a/config/cfgroot/ids-functions.pl +++ b/config/cfgroot/ids-functions.pl @@ -29,11 +29,41 @@ require '/var/ipfire/general-functions.pl'; require "${General::swroot}/network-functions.pl"; require "${General::swroot}/suricata/ruleset-sources"; +# Load perl module to deal with Archives. +use Archive::Tar; + +# Load perl module to deal with files and path. +use File::Basename; + +# Load module to move files. +use File::Copy; + +# Load module to recursely remove files and a folder. +use File::Path qw(rmtree); + +# Load module to get file stats. +use File::stat; + +# Load module to deal with temporary files. +use File::Temp; + +# Load module to deal with the date formats used by the HTTP protocol. +use HTTP::Date; + +# Load the libwwwperl User Agent module. +use LWP::UserAgent; + +# Load function from posix module to format time strings. +use POSIX qw (strftime); + +# Load module to talk to the kernel log daemon. +use Sys::Syslog qw(:DEFAULT setlogsock); + # Location where all config and settings files are stored. our $settingsdir = "${General::swroot}/suricata"; -# File where the used rulefiles are stored. -our $used_rulefiles_file = "$settingsdir/suricata-used-rulefiles.yaml"; +# File where the main file for providers ruleset inclusion exists. +our $suricata_used_rulesfiles_file = "$settingsdir/suricata-used-rulesfiles.yaml"; # File where the addresses of the homenet are stored. our $homenet_file = "$settingsdir/suricata-homenet.yaml"; @@ -44,29 +74,18 @@ our $dns_servers_file = "$settingsdir/suricata-dns-servers.yaml"; # File where the HTTP ports definition is stored. our $http_ports_file = "$settingsdir/suricata-http-ports.yaml"; -# File which contains the enabled sids. -our $enabled_sids_file = "$settingsdir/oinkmaster-enabled-sids.conf"; - -# File which contains the disabled sids. -our $disabled_sids_file = "$settingsdir/oinkmaster-disabled-sids.conf"; - -# File which contains wheater the rules should be changed. -our $modify_sids_file = "$settingsdir/oinkmaster-modify-sids.conf"; - # File which stores the configured IPS settings. our $ids_settings_file = "$settingsdir/settings"; -# DEPRECATED - File which stores the configured rules-settings. -our $rules_settings_file = "$settingsdir/rules-settings"; - # File which stores the used and configured ruleset providers. our $providers_settings_file = "$settingsdir/providers-settings"; # File which stores the configured settings for whitelisted addresses. our $ignored_file = "$settingsdir/ignored"; -# DEPRECATED - Location and name of the tarball which contains the ruleset. -our $rulestarball = "/var/tmp/idsrules.tar.gz"; +# File which stores HTTP Etags for providers which supports them +# for cache management. +our $etags_file = "$settingsdir/etags"; # Location where the downloaded rulesets are stored. our $dl_rules_path = "/var/tmp"; @@ -80,8 +99,14 @@ our $ids_page_lock_file = "/tmp/ids_page_locked"; # Location where the rulefiles are stored. our $rulespath = "/var/lib/suricata"; +# Location where the default rulefils are stored. 
+our $default_rulespath = "/usr/share/suricata/rules"; + +# Location where the addition config files are stored. +our $configspath = "/usr/share/suricata"; + # Location of the classification file. -our $classification_file = "$rulespath/classification.config"; +our $classification_file = "$configspath/classification.config"; # Location of the sid to msg mappings file. our $sid_msg_file = "$rulespath/sid-msg.map"; @@ -124,24 +149,32 @@ my @cron_intervals = ('off', 'daily', 'weekly' ); # http_ports_file. my @http_ports = ('80', '81'); +# Array which contains a list of rulefiles which always will be included if they exist. +my @static_included_rulefiles = ('local.rules', 'whitelist.rules'); + +# Array which contains a list of allways enabled application layer protocols. +my @static_enabled_app_layer_protos = ('app-layer', 'decoder', 'files', 'stream'); + # Hash which allows to convert the download type (dl_type) to a file suffix. my %dl_type_to_suffix = ( "archive" => ".tar.gz", "plain" => ".rules", ); +# Hash to translate an application layer protocol to the application name. +my %tr_app_layer_proto = ( + "ikev2" => "ipsec", + "krb5" => "kerberos", +); + # ## Function to check and create all IDS related files, if the does not exist. # sub check_and_create_filelayout() { # Check if the files exist and if not, create them. - unless (-f "$enabled_sids_file") { &create_empty_file($enabled_sids_file); } - unless (-f "$disabled_sids_file") { &create_empty_file($disabled_sids_file); } - unless (-f "$modify_sids_file") { &create_empty_file($modify_sids_file); } - unless (-f "$used_rulefiles_file") { &create_empty_file($used_rulefiles_file); } + unless (-f "$suricata_used_rulesfiles_file") { &create_empty_file($suricata_used_rulesfiles_file); } unless (-f "$ids_settings_file") { &create_empty_file($ids_settings_file); } unless (-f "$providers_settings_file") { &create_empty_file($providers_settings_file); } - unless (-f "$ignored_file") { &create_empty_file($ignored_file); } unless (-f "$whitelist_file" ) { &create_empty_file($whitelist_file); } } @@ -193,6 +226,42 @@ sub get_enabled_providers () { return @enabled_providers; } +# +## Function to get a hash of provider handles and their configured modes (IDS/IPS). +# +sub get_providers_mode () { + my %used_providers = (); + + # Hash to store the providers and their configured modes. + my %providers_mode = (); + + # Read-in the providers config file. + &General::readhasharray("$providers_settings_file", \%used_providers); + + # Loop through the hash of used_providers. + foreach my $id (keys %used_providers) { + # Skip disabled providers. + next unless ($used_providers{$id}[3] eq "enabled"); + + # Grab the provider handle. + my $provider = "$used_providers{$id}[0]"; + + # Grab the provider mode. + my $mode = "$used_providers{$id}[4]"; + + # Fall back to IDS if no mode could be obtained. + unless($mode) { + $mode = "IDS"; + } + + # Add details to provider_modes hash. + $providers_mode{$provider} = $mode; + } + + # Return the hash. + return %providers_mode; +} + # ## Function for checking if at least 300MB of free disk space are available ## on the "/var" partition. @@ -214,11 +283,8 @@ sub checkdiskspace () { # Check if the available disk space is more than 300MB. if ($available < 300) { - # Log error to syslog. - &_log_to_syslog("Not enough free disk space on /var. Only $available MB from 300 MB available."); - - # Exit function and return "1" - False. - return 1; + # Exit function and return the available disk space. 
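# ---------------------------------------------------------------------------
# [Editor's example - not part of the patch] With this change checkdiskspace()
# no longer logs and returns "1"; it returns the remaining amount of megabytes
# on /var when less than 300 MB are free, and returns nothing otherwise.  A
# hypothetical caller could therefore build its own error message:
#
#	my $available = &checkdiskspace();
#	if ($available) {
#		# Report the shortage, e.g. to the web UI or to syslog.
#		print STDERR "Not enough free disk space on /var - only $available MB left.\n";
#	}
# ---------------------------------------------------------------------------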
+ return $available; } } } @@ -228,48 +294,42 @@ sub checkdiskspace () { } # -## This function is responsible for downloading the configured IDS rulesets or if no one is specified -## all configured rulesets will be downloaded. +## This function is responsible for downloading the ruleset for a given provider. ## -## * At first it gathers all configured ruleset providers, initialize the downloader and sets an -## upstream proxy if configured. -## * After that, the given ruleset or in case all rulesets should be downloaded, it will determine wether it -## is enabled or not. +## * At first it initialize the downloader and sets an upstream proxy if configured. ## * The next step will be to generate the final download url, by obtaining the URL for the desired -## ruleset, add the settings for the upstream proxy. -## * Finally the function will grab all the rules files or tarballs from the servers. +## ruleset and add the settings for the upstream proxy. +## * Finally the function will grab the rule file or tarball from the server. +## It tries to reduce the amount of download by using the "If-Modified-Since" HTTP header. +# +## Return codes: +## +## * "no url" - If no download URL could be gathered for the provider. +## * "not modified" - In case the already stored rules file is up to date. +## * "incomplete download" - When the remote file size differs from the downloaded file size. +## * "$error" - The error message generated from the LWP::User Agent module. # sub downloadruleset ($) { my ($provider) = @_; - # If no provider is given default to "all". - $provider //= 'all'; - - # Hash to store the providers and access id's, for which rules should be downloaded. - my %sheduled_providers = (); - - # Get used provider settings. - my %used_providers = (); - &General::readhasharray("$providers_settings_file", \%used_providers); - - # Check if a ruleset has been configured. - unless(%used_providers) { - # Log that no ruleset has been configured and abort. - &_log_to_syslog("No ruleset provider has been configured."); - - # Return "1". - return 1; - } + # The amount of download attempts before giving up and + # logging an error. + my $max_dl_attempts = 3; # Read proxysettings. my %proxysettings=(); &General::readhash("${General::swroot}/proxy/settings", \%proxysettings); - # Load required perl module to handle the download. - use LWP::UserAgent; - # Init the download module. - my $downloader = LWP::UserAgent->new; + # + # Request SSL hostname verification and specify path + # to the CA file. + my $downloader = LWP::UserAgent->new( + ssl_opts => { + SSL_ca_file => '/etc/ssl/cert.pem', + verify_hostname => 1, + } + ); # Set timeout to 10 seconds. $downloader->timeout(10); @@ -292,179 +352,172 @@ sub downloadruleset ($) { $downloader->proxy(['http', 'https'], $proxy_url); } - # Loop through the hash of configured providers. - foreach my $id ( keys %used_providers ) { - # Skip providers which are not enabled. - next if ($used_providers{$id}[3] ne "enabled"); + # Grab the download url for the provider. + my $url = $IDS::Ruleset::Providers{$provider}{'dl_url'}; - # Obtain the provider handle. - my $provider_handle = $used_providers{$id}[0]; + # Check if the provider requires a subscription. + if ($IDS::Ruleset::Providers{$provider}{'requires_subscription'} eq "True") { + # Grab the subscription code. + my $subscription_code = &get_subscription_code($provider); - # Handle update off all providers. 
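# ---------------------------------------------------------------------------
# [Editor's example - not part of the patch] The download setup introduced
# above, reduced to a standalone sketch: an LWP::UserAgent with certificate
# verification plus an optional upstream proxy.  The URL and the proxy value
# are placeholders, not IPFire configuration.
# ---------------------------------------------------------------------------
use LWP::UserAgent;

my $ua = LWP::UserAgent->new(
	ssl_opts => {
		SSL_ca_file     => '/etc/ssl/cert.pem',
		verify_hostname => 1,
	}
);

# Give up after 10 seconds, as the function above does.
$ua->timeout(10);

# Optional upstream proxy, e.g. "http://user:password@proxy.example.org:800".
if (my $proxy_url = $ENV{https_proxy}) {
	$ua->proxy(['http', 'https'], $proxy_url);
}

my $response = $ua->get("https://rules.example.org/ruleset.tar.gz");
print $response->status_line . "\n";
# ---------------------------------------------------------------------------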
- if (($provider eq "all") || ($provider_handle eq "$provider")) { - # Add provider handle and it's id to the hash of sheduled providers. - $sheduled_providers{$provider_handle} = $id; - } - } + # Add the subscription code to the download url. + $url =~ s/\/$subscription_code/g; - # Loop through the hash of sheduled providers. - foreach my $provider ( keys %sheduled_providers) { - # Grab the download url for the provider. - my $url = $IDS::Ruleset::Providers{$provider}{'dl_url'}; + } - # Check if the provider requires a subscription. - if ($IDS::Ruleset::Providers{$provider}{'requires_subscription'} eq "True") { - # Grab the previously stored access id for the provider from hash. - my $id = $sheduled_providers{$provider}; + # Abort and return "no url", if no url could be determined for the provider. + return "no url" unless ($url); - # Grab the subscription code. - my $subscription_code = $used_providers{$id}[1]; + # Pass the requested URL to the downloader. + my $request = HTTP::Request->new(GET => $url); - # Add the subscription code to the download url. - $url =~ s/\/$subscription_code/g; + # Generate temporary file name, located in "/var/tmp" and with a suffix of ".tmp". + # The downloaded file will be stored there until some sanity checks are performed. + my $tmp = File::Temp->new( SUFFIX => ".tmp", DIR => "/var/tmp/", UNLINK => 0 ); + my $tmpfile = $tmp->filename(); - } + # Call function to get the final path and filename for the downloaded file. + my $dl_rulesfile = &_get_dl_rulesfile($provider); - # Abort if no url could be determined for the provider. - unless ($url) { - # Log error and abort. - &_log_to_syslog("Unable to gather a download URL for the selected ruleset provider."); - return 1; - } - - # Variable to store the filesize of the remote object. - my $remote_filesize; - - # The sourcfire (snort rules) does not allow to send "HEAD" requests, so skip this check - # for this webserver. - # - # Check if the ruleset source contains "snort.org". - unless ($url =~ /\.snort\.org/) { - # Pass the requrested url to the downloader. - my $request = HTTP::Request->new(HEAD => $url); - - # Accept the html header. - $request->header('Accept' => 'text/html'); + # Check if the rulesfile already exits, because it has been downloaded in the past. + # + # In this case we are requesting the server if the remote file has been changed or not. + # This will be done by sending the modification time in a special HTTP header. + if (-f $dl_rulesfile) { + # Call stat on the file. + my $stat = stat($dl_rulesfile); - # Perform the request and fetch the html header. - my $response = $downloader->request($request); + # Omit the mtime of the existing file. + my $mtime = $stat->mtime; - # Check if there was any error. - unless ($response->is_success) { - # Obtain error. - my $error = $response->status_line(); + # Convert the timestamp into right format. + my $http_date = time2str($mtime); - # Log error message. - &_log_to_syslog("Unable to download the ruleset. \($error\)"); + # Add the If-Modified-Since header to the request to ask the server if the + # file has been modified. + $request->header( 'If-Modified-Since' => "$http_date" ); + } - # Return "1" - false. - return 1; - } + # Read-in Etags file for known Etags if the file is present. + my %etags = (); + &General::readhash("$etags_file", \%etags) if (-f $etags_file); - # Assign the fetched header object. - my $header = $response->headers(); + # Check if an Etag for the current provider is stored. + if ($etags{$provider}) { + # Grab the stored tag. 
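# ---------------------------------------------------------------------------
# [Editor's example - not part of the patch] The conditional request built
# above on its own: send If-Modified-Since (taken from the local file's mtime)
# and If-None-Match (a cached Etag) so the server can answer with
# "304 Not Modified" instead of re-sending the whole ruleset.  Path, URL and
# the Etag value are placeholders.
# ---------------------------------------------------------------------------
use LWP::UserAgent;
use HTTP::Request;
use HTTP::Date;
use File::stat;

my $ua      = LWP::UserAgent->new();
my $request = HTTP::Request->new(GET => "https://rules.example.org/ruleset.tar.gz");

my $local_copy = "/var/tmp/example-ruleset.tar.gz";
if (-f $local_copy) {
	# Convert the mtime of the stored file into an HTTP date string.
	$request->header('If-Modified-Since' => time2str(stat($local_copy)->mtime));
}

# Etag remembered from an earlier response (placeholder value).
$request->header('If-None-Match' => '"3f80f-1b6-3e1cb03b"');

my $response = $ua->request($request);
print "ruleset is up to date\n" if ($response->code == 304);
# ---------------------------------------------------------------------------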
+ my $etag = $etags{$provider}; - # Grab the remote file size from the object and store it in the - # variable. - $remote_filesize = $header->content_length; - } + # Add an "If-None-Match header to the request to ask the server if the + # file has been modified. + $request->header( 'If-None-Match' => $etag ); + } - # Load perl module to deal with temporary files. - use File::Temp; + my $dl_attempt = 1; + my $response; - # Generate temporary file name, located in "/var/tmp" and with a suffix of ".tmp". - my $tmp = File::Temp->new( SUFFIX => ".tmp", DIR => "/var/tmp/", UNLINK => 0 ); - my $tmpfile = $tmp->filename(); + # Download and retry on failure. + while ($dl_attempt <= $max_dl_attempts) { + # Perform the request and save the output into the tmpfile. + $response = $downloader->request($request, $tmpfile); - # Pass the requested url to the downloader. - my $request = HTTP::Request->new(GET => $url); + # Check if the download was successfull. + if($response->is_success) { + # Break loop. + last; - # Perform the request and save the output into the tmpfile. - my $response = $downloader->request($request, $tmpfile); + # Check if the server responds with 304 (Not Modified). + } elsif ($response->code == 304) { + # Return "not modified". + return "not modified"; - # Check if there was any error. - unless ($response->is_success) { + # Check if we ran out of download re-tries. + } elsif ($dl_attempt eq $max_dl_attempts) { # Obtain error. my $error = $response->content; - # Log error message. - &_log_to_syslog("Unable to download the ruleset. \($error\)"); - - # Return "1" - false. - return 1; + # Return the error message from response.. + return "$error"; } - # Load perl stat module. - use File::stat; + # Remove temporary file, if one exists. + unlink("$tmpfile") if (-e "$tmpfile"); - # Perform stat on the tmpfile. - my $stat = stat($tmpfile); - - # Grab the local filesize of the downloaded tarball. - my $local_filesize = $stat->size; - - # Check if both file sizes match. - if (($remote_filesize) && ($remote_filesize ne $local_filesize)) { - # Log error message. - &_log_to_syslog("Unable to completely download the ruleset. "); - &_log_to_syslog("Only got $local_filesize Bytes instead of $remote_filesize Bytes. "); - - # Delete temporary file. - unlink("$tmpfile"); + # Increase download attempt counter. + $dl_attempt++; + } - # Return "1" - false. - return 1; - } + # Obtain the connection headers. + my $headers = $response->headers; - # Genarate and assign file name and path to store the downloaded rules file. - my $dl_rulesfile = &_get_dl_rulesfile($provider); + # Get the timestamp from header, when the file has been modified the + # last time. + my $last_modified = $headers->last_modified; - # Check if a file name could be obtained. - unless ($dl_rulesfile) { - # Log error message. - &_log_to_syslog("Unable to store the downloaded rules file. "); + # Get the remote size of the downloaded file. + my $remote_filesize = $headers->content_length; - # Delete downloaded temporary file. - unlink("$tmpfile"); + # Grab the Etag from response it the server provides one. + if ($response->header('Etag')) { + # Add the Etag to the etags hash. + $etags{$provider} = $response->header('Etag'); - # Return "1" - false. - } + # Write the etags file. + &General::writehash($etags_file, \%etags); + } - # Load file copy module, which contains the move() function. - use File::Copy; + # Perform stat on the tmpfile. + my $stat = stat($tmpfile); - # Overwrite the may existing rulefile or tarball with the downloaded one. 
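# ---------------------------------------------------------------------------
# [Editor's example - not part of the patch] The bounded retry loop used
# above, with the provider specifics stripped out: re-request the file until
# the download succeeds, stop early on "304 Not Modified", and give up after
# three attempts.  URL and target path are placeholders.
# ---------------------------------------------------------------------------
use LWP::UserAgent;
use HTTP::Request;

my $ua              = LWP::UserAgent->new();
my $request         = HTTP::Request->new(GET => "https://rules.example.org/ruleset.tar.gz");
my $max_dl_attempts = 3;
my $response;

for (my $dl_attempt = 1; $dl_attempt <= $max_dl_attempts; $dl_attempt++) {
	# Store the response body directly into the given file.
	$response = $ua->request($request, "/var/tmp/example-ruleset.tmp");

	# Download worked - leave the loop.
	last if ($response->is_success);

	# Server says our copy is still current - nothing left to do.
	last if ($response->code == 304);

	# Out of attempts - hand the error back to the caller.
	die $response->status_line . "\n" if ($dl_attempt == $max_dl_attempts);
}
# ---------------------------------------------------------------------------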
- move("$tmpfile", "$dl_rulesfile"); + # Grab the local filesize of the downloaded tarball. + my $local_filesize = $stat->size; + # Check if both file sizes match. + if (($remote_filesize) && ($remote_filesize ne $local_filesize)) { # Delete temporary file. unlink("$tmpfile"); - # Set correct ownership for the tarball. - set_ownership("$dl_rulesfile"); + # Return "1" - false. + return "incomplete download"; } + # Overwrite the may existing rulefile or tarball with the downloaded one. + move("$tmpfile", "$dl_rulesfile"); + + # Check if we got a last-modified value from the server. + if ($last_modified) { + # Assign the last-modified timestamp as mtime to the + # rules file. + utime(time(), "$last_modified", "$dl_rulesfile"); + } + + # Delete temporary file. + unlink("$tmpfile"); + + # Set correct ownership for the tarball. + set_ownership("$dl_rulesfile"); + # If we got here, everything worked fine. Return nothing. return; } # ## Function to extract a given ruleset. +## +## In case the ruleset provider offers a plain file, it simply will +## be copied. # sub extractruleset ($) { my ($provider) = @_; - # Load perl module to deal with archives. - use Archive::Tar; - - # Load perl module to deal with files and path. - use File::Basename; + # Disable chown functionality when uncompressing files. + $Archive::Tar::CHOWN = "0"; # Get full path and downloaded rulesfile for the given provider. my $tarball = &_get_dl_rulesfile($provider); # Check if the file exists. unless (-f $tarball) { - &_log_to_syslog("Could not extract ruleset file: $tarball"); + &_log_to_syslog("Could not find ruleset file: $tarball"); # Return nothing. return; @@ -475,69 +528,120 @@ sub extractruleset ($) { mkdir("$tmp_rules_directory") unless (-d "$tmp_rules_directory"); mkdir("$tmp_conf_directory") unless (-d "$tmp_conf_directory"); - # Initialize the tar module. - my $tar = Archive::Tar->new($tarball); + # Omit the type (dl_type) of the stored ruleset. + my $type = $IDS::Ruleset::Providers{$provider}{'dl_type'}; + + # Handle the different ruleset types. + if ($type eq "plain") { + # Generate destination filename an full path. + my $destination = "$tmp_rules_directory/$provider\-ruleset.rules"; + + # Copy the file into the temporary rules directory. + copy($tarball, $destination); + + } elsif ( $type eq "archive") { + # Initialize the tar module. + my $tar = Archive::Tar->new($tarball); + + # Get the filelist inside the tarball. + my @packed_files = $tar->list_files; + + # Loop through the filelist. + foreach my $packed_file (@packed_files) { + my $destination; + + # Splitt the packed file into chunks. + my $file = fileparse($packed_file); + + # Handle msg-id.map file. + if ("$file" eq "sid-msg.map") { + # Set extract destination to temporary config_dir. + $destination = "$tmp_conf_directory/$provider\-sid-msg.map"; - # Get the filelist inside the tarball. - my @packed_files = $tar->list_files; + # Handle classification.conf + } elsif ("$file" eq "classification.config") { + # Set extract destination to temporary config_dir. + $destination = "$tmp_conf_directory/$provider\-classification.config"; - # Loop through the filelist. - foreach my $packed_file (@packed_files) { - my $destination; + # Handle rules files. + } elsif ($file =~ m/\.rules$/) { + # Skip rule files which are not located in the rules directory or archive root. + next unless(($packed_file =~ /^rules\//) || ($packed_file !~ /\//)); - # Splitt the packed file into chunks. - my $file = fileparse($packed_file); + # Skip deleted.rules. 
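# ---------------------------------------------------------------------------
# [Editor's example - not part of the patch] The Archive::Tar usage shown
# above, reduced to a sketch: list the members of a ruleset tarball and
# extract only the *.rules files into a flat directory.  Paths are
# placeholders.
# ---------------------------------------------------------------------------
use Archive::Tar;
use File::Basename;

# Keep the ownership of the extracting user, as done above.
$Archive::Tar::CHOWN = "0";

my $tar = Archive::Tar->new("/var/tmp/example-ruleset.tar.gz");

foreach my $packed_file ($tar->list_files()) {
	# Reduce the member name to its basename.
	my $file = fileparse($packed_file);

	# Only rule files are of interest in this sketch.
	next unless ($file =~ m/\.rules$/);

	$tar->extract_file($packed_file, "/var/tmp/extracted-rules/$file");
}
# ---------------------------------------------------------------------------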
+ # + # Mostly they have been taken out for correctness or performance reasons and therfore + # it is not a great idea to enable any of them. + next if($file =~ m/deleted.rules$/); - # Handle msg-id.map file. - if ("$file" eq "sid-msg.map") { - # Set extract destination to temporary config_dir. - $destination = "$tmp_conf_directory/$provider\-sid-msg.map"; - # Handle classification.conf - } elsif ("$file" eq "classification.config") { - # Set extract destination to temporary config_dir. - $destination = "$tmp_conf_directory/$provider\-classification.config"; - # Handle rules files. - } elsif ($file =~ m/\.rules$/) { - my $rulesfilename; + my $rulesfilename; - # Splitt the filename into chunks. - my @filename = split("-", $file); + # Splitt the filename into chunks. + my @filename = split("-", $file); - # Reverse the array. - @filename = reverse(@filename); + # Reverse the array. + @filename = reverse(@filename); - # Get the amount of elements in the array. - my $elements = @filename; + # Get the amount of elements in the array. + my $elements = @filename; - # Remove last element of the hash. - # It contains the vendor name, which will be replaced. - if ($elements >= 3) { + # Remove last element of the hash. + # It contains the vendor name, which will be replaced. + if ($elements >= 3) { # Remove last element from hash. - pop(@filename); - } + pop(@filename); + } + + # Check if the last element of the filename does not + # contain the providers name. + if ($filename[-1] ne "$provider") { + # Add provider name as last element. + push(@filename, $provider); + } + + # Reverse the array back. + @filename = reverse(@filename); - # Check if the last element of the filename does not - # contain the providers name. - if ($filename[-1] ne "$provider") { - # Add provider name as last element. - push(@filename, $provider); + # Generate the name for the rulesfile. + $rulesfilename = join("-", @filename); + + # Set extract destination to temporaray rules_dir. + $destination = "$tmp_rules_directory/$rulesfilename"; + } else { + # Skip all other files. + next; } - # Reverse the array back. - @filename = reverse(@filename); + # Check if the destination file exists. + unless(-e "$destination") { + # Extract the file to the temporary directory. + $tar->extract_file("$packed_file", "$destination"); + } else { + # Generate temporary file name, located in the temporary rules directory and a suffix of ".tmp". + my $tmp = File::Temp->new( SUFFIX => ".tmp", DIR => "$tmp_rules_directory", UNLINK => 0 ); + my $tmpfile = $tmp->filename(); + + # Extract the file to the new temporary file name. + $tar->extract_file("$packed_file", "$tmpfile"); + + # Open the the existing file. + open(DESTFILE, ">>", "$destination") or die "Could not open $destination. $!\n"; + open(TMPFILE, "<", "$tmpfile") or die "Could not open $tmpfile. $!\n"; + + # Loop through the content of the temporary file. + while () { + # Append the content line by line to the destination file. + print DESTFILE "$_"; + } - # Generate the name for the rulesfile. - $rulesfilename = join("-", @filename); + # Close the file handles. + close(TMPFILE); + close(DESTFILE); - # Set extract destination to temporaray rules_dir. - $destination = "$tmp_rules_directory/$rulesfilename"; - } else { - # Skip all other files. - next; + # Remove the temporary file. + unlink("$tmpfile"); + } } - - # Extract the file to the temporary directory. 
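# ---------------------------------------------------------------------------
# [Editor's example - not part of the patch] The renaming scheme applied
# above, shown on its own: the vendor chunk of a rule file name is replaced by
# the provider handle.  Provider handle and file name are made-up values.
# ---------------------------------------------------------------------------
my $provider = "emerging_threats";
my $file     = "emerging-botcc-portgrouped.rules";

# Split the name into dash separated chunks and reverse them.
my @filename = reverse(split("-", $file));

# Drop the vendor chunk if there are at least three chunks.
pop(@filename) if (scalar(@filename) >= 3);

# Make sure the provider handle becomes the leading chunk.
push(@filename, $provider) unless ($filename[-1] eq "$provider");

# Prints "emerging_threats-botcc-portgrouped.rules".
print join("-", reverse(@filename)) . "\n";
# ---------------------------------------------------------------------------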
- $tar->extract_file("$packed_file", "$destination"); } } @@ -546,9 +650,6 @@ sub extractruleset ($) { ## call the functions to merge the additional config files. (classification, sid-msg, etc.). # sub oinkmaster () { - # Load perl module for file copying. - use File::Copy; - # Check if the files in rulesdir have the correct permissions. &_check_rulesdir_permissions(); @@ -560,61 +661,139 @@ sub oinkmaster () { # Loop through the array of enabled providers. foreach my $provider (@enabled_providers) { - # Omit the type (dl_type) of the stored ruleset. - my $type = $IDS::Ruleset::Providers{$provider}{'dl_type'}; - - # Handle the different ruleset types. - if ($type eq "archive") { - # Call the extractruleset function. - &extractruleset($provider); - } elsif ($type eq "plain") { - # Generate filename and full path for the stored rulesfile. - my $dl_rulesfile = &_get_dl_rulesfile($provider); - - # Generate destination filename an full path. - my $destination = "$tmp_rules_directory/$provider\-ruleset.rules"; - - # Copy the file into the temporary rules directory. - copy($dl_rulesfile, $destination); - } else { - # Skip unknown type. - next; - } + # Call the extractruleset function. + &extractruleset($provider); } - # Load perl module to talk to the kernel syslog. - use Sys::Syslog qw(:DEFAULT setlogsock); + # Call function to process the ruleset and do all modifications. + &process_ruleset(@enabled_providers); - # Establish the connection to the syslog service. - openlog('oinkmaster', 'cons,pid', 'user'); + # Call function to merge the classification files. + &merge_classifications(@enabled_providers); - # Call oinkmaster to generate ruleset. - open(OINKMASTER, "/usr/local/bin/oinkmaster.pl -s -u dir://$tmp_rules_directory -C $settingsdir/oinkmaster.conf -o $rulespath 2>&1 |") or die "Could not execute oinkmaster $!\n"; + # Call function to merge the sid to message mapping files. + &merge_sid_msg(@enabled_providers); - # Log output of oinkmaster to syslog. - while() { - # The syslog function works best with an array based input, - # so generate one before passing the message details to syslog. - my @syslog = ("INFO", "$_"); + # Cleanup temporary directory. + &cleanup_tmp_directory(); +} + +# +## Function to alter the ruleset. +# +sub process_ruleset(@) { + my (@providers) = @_; - # Send the log message. - syslog(@syslog); + # Hash to store the configured provider modes. + my %providers_mode = &get_providers_mode(); + + # Array to store the extracted rulefile from the temporary rules directory. + my @extracted_rulefiles; + + # Get names of the extracted raw rulefiles. + opendir(DIR, $tmp_rules_directory) or die "Could not read from $tmp_rules_directory. $!\n"; + while (my $file = readdir(DIR)) { + # Ignore single and double dotted files. + next if $file =~ /^\.\.?$/; + + # Add file to the array of extracted files. + push(@extracted_rulefiles, $file); } - # Close the pipe to oinkmaster process. - close(OINKMASTER); + # Close directory handle. + closedir(DIR); - # Close the log handle. - closelog(); + # Loop through the array of providers. + foreach my $provider (@providers) { + # Hash to store the obtained SIDs and REV of each provider. + my %rules = (); - # Call function to merge the classification files. - &merge_classifications(@enabled_providers); + # Hash which holds modifications to apply to the rules. + my %modifications = (); - # Call function to merge the sid to message mapping files. - &merge_sid_msg(@enabled_providers); + # Loop through the array of extraced rulefiles. 
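# ---------------------------------------------------------------------------
# [Editor's example - not part of the patch] The directory scan used by
# process_ruleset() above: collect the extracted rule files and group them by
# the provider handle they start with.  The directory and the provider handles
# are placeholders.
# ---------------------------------------------------------------------------
my $tmp_rules = "/var/tmp/ids_tmp/rules";

opendir(DIR, $tmp_rules) or die "Could not read from $tmp_rules. $!\n";
my @extracted = grep { !/^\.\.?$/ } readdir(DIR);
closedir(DIR);

foreach my $provider ("emerging_threats", "sslbl_blacklist") {
	# Files belonging to a provider carry its handle as prefix.
	my @files = grep { /^$provider/ } @extracted;

	print "$provider: " . scalar(@files) . " extracted rule file(s)\n";
}
# ---------------------------------------------------------------------------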
+ foreach my $file (@extracted_rulefiles) { + # Skip file if it does not belong to the current processed provider. + next unless ($file =~ m/^$provider/); - # Cleanup temporary directory. - &cleanup_tmp_directory(); + # Open the rulefile. + open(FILE, "$tmp_rules_directory/$file") or die "Could not read $tmp_rules_directory/$file. $!\n"; + + # Loop through the file content. + while (my $line = ) { + # Skip blank lines. + next if ($line =~ /^\s*$/); + + # Call function to get the sid and rev of the rule. + my ($sid, $rev) = &_get_sid_and_rev($line); + + # Skip rule if a sid with a higher rev already has added to the rules hash. + next if ($rev le $rules{$sid}); + + # Add the new or rule with higher rev to the hash of rules. + $rules{$sid} = $rev; + } + + # Close file handle. + close(FILE); + } + + # Get filename which contains the ruleset modifications for this provider. + my $modification_file = &get_provider_ruleset_modifications_file($provider); + + # Read file which holds the modifications of the ruleset for the current provider. + &General::readhash($modification_file, \%modifications) if (-f $modification_file); + + # Loop again through the array of extracted rulesfiles. + foreach my $file (@extracted_rulefiles) { + # Skip the file if it does not belong to the current provider. + next unless ($file =~ m/^$provider/); + + # Open the rulefile for writing. + open(RULEFILE, ">", "$rulespath/$file") or die "Could not write to file $rulespath/$file. $!\n"; + + # Open the rulefile for reading. + open(TMP_RULEFILE, "$tmp_rules_directory/$file") or die "Could not read $tmp_rules_directory/$file. $!\n"; + + # Loop through the raw temporary rulefile. + while (my $line = ) { + # Get the sid and rev of the rule. + my ($sid, $rev) = &_get_sid_and_rev($line); + + # Check if the current rule is obsoleted by a newer one. + # + # In this case the rev number in the rules hash is higher than the current one. + next if ($rev lt $rules{$sid}); + + # Check if the rule should be enabled or disabled. + if ($modifications{$sid} eq "enabled") { + # Drop the # at the start of the line. + $line =~ s/^\#//; + } elsif ($modifications{$sid} eq "disabled") { + # Add a # at the start of the line to disable the rule. + $line = "#$line" unless ($line =~ /^#/); + } + + # Check if the Provider is set so IPS mode. + if ($providers_mode{$provider} eq "IPS") { + # Replacements for sourcefire rules. + $line =~ s/^#\s*(?:alert|drop)(.+policy balanced-ips alert)/alert${1}/; + $line =~ s/^#\s*(?:alert|drop)(.+policy balanced-ips drop)/drop${1}/; + + # Replacements for generic rules. + $line =~ s/^(#?)\s*(?:alert|drop)/${1}drop/; + $line =~ s/^(#?)\s*drop(.+flowbits:noalert;)/${1}alert${2}/; + } + + # Write line / rule to the target rule file. + print RULEFILE "$line"; + } + + # Close filehandles. + close(RULEFILE); + close(TMP_RULEFILE); + } + } } # @@ -756,9 +935,6 @@ sub merge_sid_msg (@) { ## the rules directory. # sub move_tmp_ruleset() { - # Load perl module. - use File::Copy; - # Do a directory listing of the temporary directory. opendir DH, $tmp_rules_directory; @@ -776,8 +952,6 @@ sub move_tmp_ruleset() { ## Function to cleanup the temporary IDS directroy. # sub cleanup_tmp_directory () { - # Load rmtree() function from file path perl module. - use File::Path 'rmtree'; # Delete temporary directory and all containing files. rmtree([ "$tmp_directory" ]); @@ -805,9 +979,6 @@ sub log_error ($) { sub _log_to_syslog ($) { my ($message) = @_; - # Load perl module to talk to the kernel syslog. 
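# ---------------------------------------------------------------------------
# [Editor's example - not part of the patch] The per-line transformations of
# process_ruleset() above, demonstrated on one made-up rule: enable it by
# stripping the leading "#", then convert it for IPS mode.
# ---------------------------------------------------------------------------
my $line = '#alert tcp $HOME_NET any -> $EXTERNAL_NET any (msg:"example"; flowbits:noalert; sid:1000001; rev:3;)' . "\n";

# Drop the # at the start of the line to enable the rule.
$line =~ s/^\#//;

# In IPS mode turn the action into "drop" ...
$line =~ s/^(#?)\s*(?:alert|drop)/${1}drop/;

# ... but keep "alert" for rules which only set a flowbit (flowbits:noalert).
$line =~ s/^(#?)\s*drop(.+flowbits:noalert;)/${1}alert${2}/;

# Still starts with "alert", because the rule carries flowbits:noalert.
print $line;
# ---------------------------------------------------------------------------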
- use Sys::Syslog qw(:DEFAULT setlogsock); - # The syslog function works best with an array based input, # so generate one before passing the message details to syslog. my @syslog = ("ERR", " $message"); @@ -869,6 +1040,27 @@ sub _get_dl_rulesfile($) { return $rulesfile; } +# +## Private function to obtain the sid and rev of a rule. +# +## Returns an array with the sid as first and the rev as second value. +# +sub _get_sid_and_rev ($) { + my ($line) = @_; + + my @ret; + + # Use regex to obtain the sid and rev. + if ($line =~ m/.*sid:\s*(.*?);.*rev:\s*(.*?);/) { + # Add the sid and rev to the array. + push(@ret, $1); + push(@ret, $2); + } + + # Return the array. + return @ret; +} + # ## Tiny function to delete the stored ruleset file or tarball for a given provider. # @@ -885,6 +1077,57 @@ sub drop_dl_rulesfile ($) { } } +# +## Function to read-in the given enabled or disables sids file. +# +sub read_enabled_disabled_sids_file($) { + my ($file) = @_; + + # Temporary hash to store the sids and their state. It will be + # returned at the end of this function. + my %temphash; + + # Open the given filename. + open(FILE, "$file") or die "Could not open $file. $!\n"; + + # Loop through the file. + while() { + # Remove newlines. + chomp $_; + + # Skip blank lines. + next if ($_ =~ /^\s*$/); + + # Skip coments. + next if ($_ =~ /^\#/); + + # Splitt line into sid and state part. + my ($state, $sid) = split(" ", $_); + + # Skip line if the sid is not numeric. + next unless ($sid =~ /\d+/ ); + + # Check if the sid was enabled. + if ($state eq "enablesid") { + # Add the sid and its state as enabled to the temporary hash. + $temphash{$sid} = "enabled"; + # Check if the sid was disabled. + } elsif ($state eq "disablesid") { + # Add the sid and its state as disabled to the temporary hash. + $temphash{$sid} = "disabled"; + # Invalid state - skip the current sid and state. + } else { + next; + } + } + + # Close filehandle. + close(FILE); + + # Return the hash. + return %temphash; +} + # ## Function to check if the IDS is running. # @@ -1226,80 +1469,173 @@ sub generate_http_ports_file() { } # -## Function to generate and write the file for used rulefiles. +## Function to write the file that contains the rulefiles which are loaded by suricaa. +## +## This function requires an array of used provider handles. # -sub write_used_rulefiles_file(@) { - my @files = @_; +sub write_used_rulefiles_file (@) { + my (@providers) = @_; + + # Get the enabled application layer protocols. + my @enabled_app_layer_protos = &get_suricata_enabled_app_layer_protos(); - # Open file for used rulefiles. - open (FILE, ">$used_rulefiles_file") or die "Could not write to $used_rulefiles_file. $!\n"; + # Open the file. + open (FILE, ">", $suricata_used_rulesfiles_file) or die "Could not write to $suricata_used_rulesfiles_file. $!\n"; - # Write yaml header to the file. print FILE "%YAML 1.1\n"; print FILE "---\n\n"; - # Write header to file. + # Write notice about autogenerated file. print FILE "#Autogenerated file. Any custom changes will be overwritten!\n"; - # Allways use the whitelist. - print FILE " - whitelist.rules\n"; + # Loop through the array of static included rulesfiles. + foreach my $file (@static_included_rulefiles) { + # Check if the file exists. + if (-f "$rulespath/$file") { + # Write the rulesfile name to the file. + print FILE " - $rulespath/$file\n"; + } + } - # Loop through the array of given files. - foreach my $file (@files) { - # Check if the given filename exists and write it to the file of used rulefiles. 
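# ---------------------------------------------------------------------------
# [Editor's example - not part of the patch] What the sid/rev extraction used
# by process_ruleset() returns for a typical rule line (the rule itself is
# made up).
# ---------------------------------------------------------------------------
my $rule = 'alert tcp any any -> any any (msg:"example rule"; sid:2100498; rev:7;)';

if ($rule =~ m/.*sid:\s*(.*?);.*rev:\s*(.*?);/) {
	# Prints "sid=2100498 rev=7".
	print "sid=$1 rev=$2\n";
}
# ---------------------------------------------------------------------------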
- if(-f "$rulespath/$file") { - print FILE " - $file\n"; + print FILE "\n#Default rules for used application layer protocols.\n"; + foreach my $enabled_app_layer_proto (@enabled_app_layer_protos) { + # Check if the current processed app layer proto needs to be translated + # into an application name. + if (exists($tr_app_layer_proto{$enabled_app_layer_proto})) { + # Obtain the translated application name for this protocol. + $enabled_app_layer_proto = $tr_app_layer_proto{$enabled_app_layer_proto}; + } + + # Generate filename. + my $rulesfile = "$default_rulespath/$enabled_app_layer_proto\.rules"; + + # Check if such a file exists. + if (-f "$rulesfile") { + # Write the rulesfile name to the file. + print FILE " - $rulesfile\n"; + } + + # Generate filename with "events" in filename. + $rulesfile = "$default_rulespath/$enabled_app_layer_proto\-events.rules"; + + # Check if this file exists. + if (-f "$rulesfile" ) { + # Write the rulesfile name to the file. + print FILE " - $rulesfile\n"; } } - # Close file after writing. + # Loop through the array of enabled providers. + foreach my $provider (@providers) { + # Get the used rulefile for this provider. + my @used_rulesfiles = &get_provider_used_rulesfiles($provider); + + # Check if there are + if(@used_rulesfiles) { + # Add notice to the file. + print FILE "\n#Used Rulesfiles for provider $provider.\n"; + + # Loop through the array of used rulefiles. + foreach my $enabled_rulesfile (@used_rulesfiles) { + # Generate name and full path to the rulesfile. + my $rulesfile = "$rulespath/$enabled_rulesfile"; + + # Write the ruelsfile name to the file. + print FILE " - $rulesfile\n"; + } + } + } + + # Close the file handle close(FILE); } # -## Function to generate and write the file for modify the ruleset. +## Tiny function to generate the full path and name for the file which stores the used rulefiles of a given provider. # -sub write_modify_sids_file() { - # Get configured settings. - my %idssettings=(); - &General::readhash("$ids_settings_file", \%idssettings); +sub get_provider_used_rulesfiles_file ($) { + my ($provider) = @_; - # Open modify sid's file for writing. - open(FILE, ">$modify_sids_file") or die "Could not write to $modify_sids_file. $!\n"; + my $filename = "$settingsdir/$provider\-used\-rulesfiles"; - # Write file header. - print FILE "#Autogenerated file. Any custom changes will be overwritten!\n"; + # Return the gernerated file. + return $filename; +} - # Check if the traffic only should be monitored. - unless($idssettings{'MONITOR_TRAFFIC_ONLY'} eq 'on') { - # Suricata is in IPS mode, which means that the rule actions have to be changed - # from 'alert' to 'drop', however not all rules should be changed. Some rules - # exist purely to set a flowbit which is used to convey other information, such - # as a specific type of file being downloaded, to other rulewhich then check for - # malware in that file. Rules which fall into the first category should stay as - # alert since not all flows of that type contain malware. - - # These types of rulesfiles contain meta-data which gives the action that should - # be used when in IPS mode. Do the following: - # - # 1. Disable all rules and set the action to 'drop' - # 2. Set the action back to 'alert' if the rule contains 'flowbits:noalert;' - # This should give rules not in the policy a reasonable default if the user - # manually enables them. - # 3. Enable rules and set actions according to the meta-data strings. - - my $policy = 'balanced'; # Placeholder to allow policy to be changed. 
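# ---------------------------------------------------------------------------
# [Editor's example - not part of the patch] A condensed sketch of the YAML
# fragment which write_used_rulefiles_file() above emits for one provider.
# The provider handle and its rule files are made-up values; the path follows
# the $rulespath default of this file.
# ---------------------------------------------------------------------------
my %example = (
	"emerging_threats" => [ "emerging_threats-exploit.rules", "emerging_threats-malware.rules" ],
);

foreach my $provider (sort keys %example) {
	print "\n#Used Rulesfiles for provider $provider.\n";

	foreach my $enabled_rulesfile (@{ $example{$provider} }) {
		# Each entry becomes one YAML list item with the full path.
		print " - /var/lib/suricata/$enabled_rulesfile\n";
	}
}
# ---------------------------------------------------------------------------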
- - print FILE <mtime; + } + + # Check if the timestamp has not been grabbed. + unless ($mtime) { + # Return N/A for Not available. + return "N/A"; + } + + # Convert into human read-able format. + $date = strftime('%Y-%m-%d %H:%M:%S', localtime($mtime)); + + # Return the date. + return $date; } # @@ -1339,6 +1675,48 @@ sub get_suricata_version($) { } } +# +## Function to get the enabled application layer protocols. +# +sub get_suricata_enabled_app_layer_protos() { + # Array to store and return the enabled app layer protos. + my @enabled_app_layer_protos = (); + + # Execute piped suricata command and return the list of + # enabled application layer protocols. + open(SURICATA, "suricata --list-app-layer-protos |") or die "Could not execute program: $!"; + + # Grab and store the list of enabled application layer protocols. + my @output = ; + + # Close pipe. + close(SURICATA); + + # Merge allways enabled static application layers protocols array. + @enabled_app_layer_protos = @static_enabled_app_layer_protos; + + # Loop through the array which contains the output of suricata. + foreach my $line (@output) { + # Skip header line which starts with "===". + next if ($line =~ /^\s*=/); + + # Skip info or warning lines. + next if ($line =~ /\s*--/); + + # Remove newlines. + chomp($line); + + # Add enabled app layer proto to the array. + push(@enabled_app_layer_protos, $line); + } + + # Sort the array. + @enabled_app_layer_protos = sort(@enabled_app_layer_protos); + + # Return the array. + return @enabled_app_layer_protos; +} + # ## Function to generate the rules file with whitelisted addresses. # @@ -1375,7 +1753,7 @@ sub generate_ignore_file() { # Check if the address/network is valid. if ((&General::validip($address)) || (&General::validipandmask($address))) { # Write rule line to the file to pass any traffic from this IP - print FILE "pass ip $address any -> any any (msg:\"pass all traffic from/to $address\"\; sid:$sid\;)\n"; + print FILE "pass ip $address any -> any any (msg:\"pass all traffic from/to $address\"\; bypass; sid:$sid\;)\n"; # Increment sid. $sid++; @@ -1509,41 +1887,37 @@ sub get_red_address() { } # -## Function to get all used rulesfiles files. +## Function to get the used rules files of a given provider. # -sub get_used_rulesfiles() { - # Array to store the used rulefiles. - my @used_rulesfiles = (); - - # Check if the used rulesfile is empty. - unless (-z $used_rulefiles_file) { - # Open the file or used rulefiles and read-in content. - open(FILE, $used_rulefiles_file) or die "Could not open $used_rulefiles_file. $!\n"; +sub get_provider_used_rulesfiles($) { + my ($provider) = @_; - while () { - # Assign the current line to a nice variable. - my $line = $_; + # Hash to store the used rulefiles of the provider. + my %provider_rulefiles = (); - # Remove newlines. - chomp($line); + # Array to store the used rulefiles. + my @used_rulesfiles = (); - # Skip comments. - next if ($line =~ /\#/); + # Get the filename which contains the used rulefiles for this provider. + my $used_rulesfiles_file = &get_provider_used_rulesfiles_file($provider); - # Skip blank lines. - next if ($line =~ /^\s*$/); + # Read-in file, if it exists. + &General::readhash("$used_rulesfiles_file", \%provider_rulefiles) if (-f $used_rulesfiles_file); - # Gather the rulefile. - if ($line =~ /.*- (.*)/) { - my $rulefile = $1; + # Loop through the hash of rulefiles which does the provider offer. + foreach my $rulefile (keys %provider_rulefiles) { + # Skip disabled rulefiles. 
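# ---------------------------------------------------------------------------
# [Editor's example - not part of the patch] The pipe-and-filter pattern of
# get_suricata_enabled_app_layer_protos() above on its own: read the protocol
# list from suricata and drop the header as well as info or warning lines.
# ---------------------------------------------------------------------------
open(SURICATA, "suricata --list-app-layer-protos |") or die "Could not execute program: $!";
my @output = <SURICATA>;
close(SURICATA);

my @protos;
foreach my $line (@output) {
	chomp($line);

	# Skip the "===" header line and any info or warning lines.
	next if ($line =~ /^\s*=/);
	next if ($line =~ /\s*--/);

	push(@protos, $line);
}

print join(", ", sort @protos) . "\n";
# ---------------------------------------------------------------------------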
+ next unless($provider_rulefiles{$rulefile} eq "enabled"); - # Add the rulefile to the array of used rulesfiles. - push(@used_rulesfiles, $rulefile); - } - } + # The General::readhash function does not allow dots as + # key value and limits the key "string" to the part before + # the dot, in case it contains one. + # + # So add the file extension for the rules file manually again. + $rulefile = "$rulefile.rules"; - # Close the file. - close(FILE); + # Add the enabled rulefile to the array of enabled rulefiles. + push(@used_rulesfiles, $rulefile); } # Return the array of used rulesfiles.
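# ---------------------------------------------------------------------------
# [Editor's example - not part of the patch] Why ".rules" is re-appended
# above: General::readhash() cuts the key off at the dot, so a hypothetical
# <provider>-used-rulesfiles file hands the names back without their
# extension.  The hash below mimics what readhash() would return.
# ---------------------------------------------------------------------------
my %provider_rulefiles = (
	"emerging_threats-exploit" => "enabled",
	"emerging_threats-games"   => "disabled",
);

my @used_rulesfiles;
foreach my $rulefile (keys %provider_rulefiles) {
	# Only enabled rule files are of interest.
	next unless ($provider_rulefiles{$rulefile} eq "enabled");

	# Re-add the file extension which got lost while reading the hash.
	push(@used_rulesfiles, "$rulefile.rules");
}

# Prints "emerging_threats-exploit.rules".
print "$_\n" foreach (@used_rulesfiles);
# ---------------------------------------------------------------------------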