X-Git-Url: http://git.ipfire.org/?a=blobdiff_plain;f=config%2Fcfgroot%2Fids-functions.pl;h=9ea9b69dc78076f58a74377c41966f37325643c1;hb=15832b10c20212fe80aa5ba41521a4ad69965bb2;hp=89ad90c2e3c292bbdca99ac0ecc239e976776f85;hpb=fd2dccaabb2e28cf875d7d81c7faf90f7941f56b;p=people%2Fstevee%2Fipfire-2.x.git diff --git a/config/cfgroot/ids-functions.pl b/config/cfgroot/ids-functions.pl index 89ad90c2e3..9ea9b69dc7 100644 --- a/config/cfgroot/ids-functions.pl +++ b/config/cfgroot/ids-functions.pl @@ -21,14 +21,24 @@ # # ############################################################################ +use strict; + package IDS; require '/var/ipfire/general-functions.pl'; +require "${General::swroot}/network-functions.pl"; +require "${General::swroot}/suricata/ruleset-sources-new"; # Location where all config and settings files are stored. our $settingsdir = "${General::swroot}/suricata"; -# File where the used rulefiles are stored. +# File where the main file for providers ruleset inclusion exists. +our $suricata_used_providers_file = "$settingsdir/suricata-used-providers.yaml"; + +# File for static ruleset inclusions. +our $suricata_static_rulefiles_file = "$settingsdir/suricata-static-included-rulefiles.yaml"; + +# DEPRECATED - File where the used rulefiles are stored. our $used_rulefiles_file = "$settingsdir/suricata-used-rulefiles.yaml"; # File where the addresses of the homenet are stored. @@ -37,27 +47,39 @@ our $homenet_file = "$settingsdir/suricata-homenet.yaml"; # File where the addresses of the used DNS servers are stored. our $dns_servers_file = "$settingsdir/suricata-dns-servers.yaml"; -# File which contains the enabled sids. +# File where the HTTP ports definition is stored. +our $http_ports_file = "$settingsdir/suricata-http-ports.yaml"; + +# DEPRECATED - File which contains the enabled sids. our $enabled_sids_file = "$settingsdir/oinkmaster-enabled-sids.conf"; -# File which contains the disabled sids. +# DEPRECATED - File which contains the disabled sids. our $disabled_sids_file = "$settingsdir/oinkmaster-disabled-sids.conf"; +# File which contains includes for provider specific rule modifications. +our $oinkmaster_provider_includes_file = "$settingsdir/oinkmaster-provider-includes.conf"; + # File which contains wheater the rules should be changed. our $modify_sids_file = "$settingsdir/oinkmaster-modify-sids.conf"; # File which stores the configured IPS settings. our $ids_settings_file = "$settingsdir/settings"; -# File which stores the configured rules-settings. +# DEPRECATED - File which stores the configured rules-settings. our $rules_settings_file = "$settingsdir/rules-settings"; +# File which stores the used and configured ruleset providers. +our $providers_settings_file = "$settingsdir/providers-settings"; + # File which stores the configured settings for whitelisted addresses. our $ignored_file = "$settingsdir/ignored"; -# Location and name of the tarball which contains the ruleset. +# DEPRECATED - Location and name of the tarball which contains the ruleset. our $rulestarball = "/var/tmp/idsrules.tar.gz"; +# Location where the downloaded rulesets are stored. +our $dl_rules_path = "/var/tmp"; + # File to store any errors, which also will be read and displayed by the wui. our $storederrorfile = "/tmp/ids_storederror"; @@ -67,6 +89,15 @@ our $ids_page_lock_file = "/tmp/ids_page_locked"; # Location where the rulefiles are stored. our $rulespath = "/var/lib/suricata"; +# Location of the classification file. 
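# A minimal sketch of how the storage path for a downloaded ruleset is
# composed from $dl_rules_path together with the $dl_rulesfile_prefix and
# %dl_type_to_suffix variables declared further down (see _get_dl_rulesfile()).
# The provider handle and download type below are hypothetical.
use strict;
use warnings;

my $dl_rules_path       = "/var/tmp";
my $dl_rulesfile_prefix = "idsrules";
my %dl_type_to_suffix   = (
	"archive" => ".tar.gz",
	"plain"   => ".rules",
);

my $provider = "exampleprovider";	# hypothetical provider handle
my $dl_type  = "archive";		# hypothetical download type

my $suffix    = $dl_type_to_suffix{$dl_type};
my $rulesfile = "$dl_rules_path/$dl_rulesfile_prefix-$provider$suffix";

print "$rulesfile\n";	# prints /var/tmp/idsrules-exampleprovider.tar.gz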
+our $classification_file = "$rulespath/classification.config"; + +# Location of the sid to msg mappings file. +our $sid_msg_file = "$rulespath/sid-msg.map"; + +# Location to store local rules. This file will not be touched. +our $local_rules_file = "$rulespath/local.rules"; + # File which contains the rules to whitelist addresses on suricata. our $whitelist_file = "$rulespath/whitelist.rules"; @@ -80,34 +111,106 @@ our $idspidfile = "/var/run/suricata.pid"; # Location of suricatactrl. my $suricatactrl = "/usr/local/bin/suricatactrl"; +# Prefix for each downloaded ruleset. +my $dl_rulesfile_prefix = "idsrules"; + +# Temporary directory where the rulesets will be extracted. +my $tmp_directory = "/tmp/ids_tmp"; + +# Temporary directory where the extracted rules files will be stored. +my $tmp_rules_directory = "$tmp_directory/rules"; + +# Temporary directory where the extracted additional config files will be stored. +my $tmp_conf_directory = "$tmp_directory/conf"; + # Array with allowed commands of suricatactrl. my @suricatactrl_cmds = ( 'start', 'stop', 'restart', 'reload', 'fix-rules-dir', 'cron' ); # Array with supported cron intervals. my @cron_intervals = ('off', 'daily', 'weekly' ); +# Array which contains the HTTP ports, which statically will be declared as HTTP_PORTS in the +# http_ports_file. +my @http_ports = ('80', '81'); + +# Array which contains a list of rulefiles which always will be included if they exist. +my @static_included_rulefiles = ('local.rules', 'whitelist.rules' ); + +# Hash which allows to convert the download type (dl_type) to a file suffix. +my %dl_type_to_suffix = ( + "archive" => ".tar.gz", + "plain" => ".rules", +); + # ## Function to check and create all IDS related files, if the does not exist. # sub check_and_create_filelayout() { # Check if the files exist and if not, create them. - unless (-f "$enabled_sids_file") { &create_empty_file($enabled_sids_file); } - unless (-f "$disabled_sids_file") { &create_empty_file($disabled_sids_file); } + unless (-f "$oinkmaster_provider_includes_file") { &create_empty_file($oinkmaster_provider_includes_file); } unless (-f "$modify_sids_file") { &create_empty_file($modify_sids_file); } - unless (-f "$used_rulefiles_file") { &create_empty_file($used_rulefiles_file); } + unless (-f "$suricata_used_providers_file") { &create_empty_file($suricata_used_providers_file); } unless (-f "$ids_settings_file") { &create_empty_file($ids_settings_file); } - unless (-f "$rules_settings_file") { &create_empty_file($rules_settings_file); } + unless (-f "$providers_settings_file") { &create_empty_file($providers_settings_file); } unless (-f "$ignored_file") { &create_empty_file($ignored_file); } unless (-f "$whitelist_file" ) { &create_empty_file($whitelist_file); } } +# +## Function to get a list of all available ruleset providers. +## +## They will be returned as a sorted array. +# +sub get_ruleset_providers() { + my @providers; + + # Loop through the hash of providers. + foreach my $provider ( keys %IDS::Ruleset::Providers ) { + # Add the provider to the array. + push(@providers, $provider); + } + + # Sort and return the array. + return sort(@providers); +} + +# +## Function to get a list of all enabled ruleset providers. +## +## They will be returned as an array. +# +sub get_enabled_providers () { + my %used_providers = (); + + # Array to store the enabled providers. + my @enabled_providers = (); + + # Read-in the providers config file. 
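# A small standalone sketch of the record layout this file assumes for
# %used_providers: the indexes referenced below ([0] = provider handle,
# [1] = subscription code, [3] = enabled/disabled status) imply records like
# the hypothetical ones here; field [2] is not referenced in this file.
use strict;
use warnings;

my %used_providers = (
	1 => [ "providerone", "", "enabled", "enabled" ],	# hypothetical
	2 => [ "providertwo", "", "enabled", "disabled" ],	# hypothetical
);

# The same filter used by get_enabled_providers(): keep providers whose
# status field is set to "enabled".
my @enabled_providers = map { $used_providers{$_}[0] }
	grep { $used_providers{$_}[3] eq "enabled" } sort keys %used_providers;

print "Enabled providers: @enabled_providers\n";	# prints providerone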
+ &General::readhasharray("$providers_settings_file", \%used_providers); + + # Loop through the hash of used_providers. + foreach my $id (keys %used_providers) { + # Skip disabled providers. + next unless ($used_providers{$id}[3] eq "enabled"); + + # Grab the provider handle. + my $provider = "$used_providers{$id}[0]"; + + # Add the provider to the array of enabled providers. + push(@enabled_providers, $provider); + } + + # Return the array. + return @enabled_providers; +} + # ## Function for checking if at least 300MB of free disk space are available ## on the "/var" partition. # sub checkdiskspace () { # Call diskfree to gather the free disk space of /var. - my @df = `/bin/df -B M /var`; + my @df = &General::system_output("/bin/df", "-B", "M", "/var"); # Loop through the output. foreach my $line (@df) { @@ -136,32 +239,39 @@ sub checkdiskspace () { } # -## This function is responsible for downloading the configured IDS ruleset. +## This function is responsible for downloading the configured IDS rulesets or if no one is specified +## all configured rulesets will be downloaded. ## -## * At first it obtains from the stored rules settings which ruleset should be downloaded. -## * The next step is to get the download locations for all available rulesets. -## * After that, the function will check if an upstream proxy should be used and grab the settings. -## * The last step will be to generate the final download url, by obtaining the URL for the desired -## ruleset, add the settings for the upstream proxy and final grab the rules tarball from the server. +## * At first it gathers all configured ruleset providers, initialize the downloader and sets an +## upstream proxy if configured. +## * After that, the given ruleset or in case all rulesets should be downloaded, it will determine wether it +## is enabled or not. +## * The next step will be to generate the final download url, by obtaining the URL for the desired +## ruleset, add the settings for the upstream proxy. +## * Finally the function will grab all the rules files or tarballs from the servers. # -sub downloadruleset { - # Get rules settings. - my %rulessettings=(); - &General::readhash("$rules_settings_file", \%rulessettings); +sub downloadruleset ($) { + my ($provider) = @_; + + # If no provider is given default to "all". + $provider //= 'all'; + + # Hash to store the providers and access id's, for which rules should be downloaded. + my %sheduled_providers = (); + + # Get used provider settings. + my %used_providers = (); + &General::readhasharray("$providers_settings_file", \%used_providers); # Check if a ruleset has been configured. - unless($rulessettings{'RULES'}) { + unless(%used_providers) { # Log that no ruleset has been configured and abort. - &_log_to_syslog("No ruleset source has been configured."); + &_log_to_syslog("No ruleset provider has been configured."); # Return "1". return 1; } - # Get all available ruleset locations. - my %rulesetsources=(); - &General::readhash($rulesetsourcesfile, \%rulesetsources); - # Read proxysettings. my %proxysettings=(); &General::readhash("${General::swroot}/proxy/settings", \%proxysettings); @@ -193,40 +303,100 @@ sub downloadruleset { $downloader->proxy(['http', 'https'], $proxy_url); } - # Grab the right url based on the configured vendor. - my $url = $rulesetsources{$rulessettings{'RULES'}}; + # Loop through the hash of configured providers. + foreach my $id ( keys %used_providers ) { + # Skip providers which are not enabled. 
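# A brief sketch of the downloader and upstream proxy setup used by this
# function: LWP::UserAgent->proxy() takes an array reference of schemes plus
# the proxy URL, optionally carrying "user:password@" credentials. The proxy
# host and credentials below are hypothetical.
use strict;
use warnings;
use LWP::UserAgent;

my $downloader = LWP::UserAgent->new;

# Hypothetical upstream proxy, analogous to the values read from the
# web proxy settings file above.
my $proxy_url = "http://user:password\@proxy.example.org:800/";
$downloader->proxy([ 'http', 'https' ], $proxy_url);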
+ next if ($used_providers{$id}[3] ne "enabled"); - # Check if the vendor requires an oinkcode and add it if needed. - $url =~ s/\/$rulessettings{'OINKCODE'}/g; + # Obtain the provider handle. + my $provider_handle = $used_providers{$id}[0]; - # Abort if no url could be determined for the vendor. - unless ($url) { - # Log error and abort. - &_log_to_syslog("Unable to gather a download URL for the selected ruleset."); - return 1; + # Handle update off all providers. + if (($provider eq "all") || ($provider_handle eq "$provider")) { + # Add provider handle and it's id to the hash of sheduled providers. + $sheduled_providers{$provider_handle} = $id; + } } - # Variable to store the filesize of the remote object. - my $remote_filesize; + # Loop through the hash of sheduled providers. + foreach my $provider ( keys %sheduled_providers) { + # Grab the download url for the provider. + my $url = $IDS::Ruleset::Providers{$provider}{'dl_url'}; - # The sourcfire (snort rules) does not allow to send "HEAD" requests, so skip this check - # for this webserver. - # - # Check if the ruleset source contains "snort.org". - unless ($url =~ /\.snort\.org/) { - # Pass the requrested url to the downloader. - my $request = HTTP::Request->new(HEAD => $url); + # Check if the provider requires a subscription. + if ($IDS::Ruleset::Providers{$provider}{'requires_subscription'} eq "True") { + # Grab the previously stored access id for the provider from hash. + my $id = $sheduled_providers{$provider}; + + # Grab the subscription code. + my $subscription_code = $used_providers{$id}[1]; + + # Add the subscription code to the download url. + $url =~ s/\/$subscription_code/g; + + } + + # Abort if no url could be determined for the provider. + unless ($url) { + # Log error and abort. + &_log_to_syslog("Unable to gather a download URL for the selected ruleset provider."); + return 1; + } + + # Variable to store the filesize of the remote object. + my $remote_filesize; + + # The sourcfire (snort rules) does not allow to send "HEAD" requests, so skip this check + # for this webserver. + # + # Check if the ruleset source contains "snort.org". + unless ($url =~ /\.snort\.org/) { + # Pass the requrested url to the downloader. + my $request = HTTP::Request->new(HEAD => $url); + + # Accept the html header. + $request->header('Accept' => 'text/html'); - # Accept the html header. - $request->header('Accept' => 'text/html'); + # Perform the request and fetch the html header. + my $response = $downloader->request($request); + + # Check if there was any error. + unless ($response->is_success) { + # Obtain error. + my $error = $response->status_line(); + + # Log error message. + &_log_to_syslog("Unable to download the ruleset. \($error\)"); + + # Return "1" - false. + return 1; + } - # Perform the request and fetch the html header. - my $response = $downloader->request($request); + # Assign the fetched header object. + my $header = $response->headers(); + + # Grab the remote file size from the object and store it in the + # variable. + $remote_filesize = $header->content_length; + } + + # Load perl module to deal with temporary files. + use File::Temp; + + # Generate temporary file name, located in "/var/tmp" and with a suffix of ".tmp". + my $tmp = File::Temp->new( SUFFIX => ".tmp", DIR => "/var/tmp/", UNLINK => 0 ); + my $tmpfile = $tmp->filename(); + + # Pass the requested url to the downloader. + my $request = HTTP::Request->new(GET => $url); + + # Perform the request and save the output into the tmpfile. 
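# A compact standalone sketch of the download-and-verify pattern used in
# downloadruleset(), with a hypothetical URL: read the remote size via a HEAD
# request, stream the body into a temporary file, then compare both sizes.
use strict;
use warnings;
use LWP::UserAgent;
use HTTP::Request;
use File::Temp;
use File::stat;

my $url = "https://rules.example.org/ruleset.tar.gz";	# hypothetical
my $ua  = LWP::UserAgent->new;

# HEAD request to learn the remote file size (may stay undefined).
my $head = $ua->request(HTTP::Request->new(HEAD => $url));
my $remote_filesize = $head->is_success ? $head->headers->content_length : undef;

# Passing a filename as second argument to request() makes LWP write the
# response body to that file.
my $tmp      = File::Temp->new(SUFFIX => ".tmp", DIR => "/var/tmp/", UNLINK => 0);
my $tmpfile  = $tmp->filename();
my $response = $ua->request(HTTP::Request->new(GET => $url), $tmpfile);
die "Download failed: " . $response->status_line . "\n" unless ($response->is_success);

# Compare local and remote file sizes, as downloadruleset() does.
my $local_filesize = stat($tmpfile)->size;
if ((defined $remote_filesize) && ($remote_filesize != $local_filesize)) {
	unlink($tmpfile);
	die "Incomplete download: got $local_filesize of $remote_filesize bytes.\n";
}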
+ my $response = $downloader->request($request, $tmpfile); # Check if there was any error. unless ($response->is_success) { # Obtain error. - my $error = $response->status_line(); + my $error = $response->content; # Log error message. &_log_to_syslog("Unable to download the ruleset. \($error\)"); @@ -235,84 +405,194 @@ sub downloadruleset { return 1; } - # Assign the fetched header object. - my $header = $response->headers(); - - # Grab the remote file size from the object and store it in the - # variable. - $remote_filesize = $header->content_length; - } + # Load perl stat module. + use File::stat; - # Load perl module to deal with temporary files. - use File::Temp; + # Perform stat on the tmpfile. + my $stat = stat($tmpfile); - # Generate temporary file name, located in "/var/tmp" and with a suffix of ".tar.gz". - my $tmp = File::Temp->new( SUFFIX => ".tar.gz", DIR => "/var/tmp/", UNLINK => 0 ); - my $tmpfile = $tmp->filename(); + # Grab the local filesize of the downloaded tarball. + my $local_filesize = $stat->size; - # Pass the requested url to the downloader. - my $request = HTTP::Request->new(GET => $url); + # Check if both file sizes match. + if (($remote_filesize) && ($remote_filesize ne $local_filesize)) { + # Log error message. + &_log_to_syslog("Unable to completely download the ruleset. "); + &_log_to_syslog("Only got $local_filesize Bytes instead of $remote_filesize Bytes. "); - # Perform the request and save the output into the tmpfile. - my $response = $downloader->request($request, $tmpfile); + # Delete temporary file. + unlink("$tmpfile"); - # Check if there was any error. - unless ($response->is_success) { - # Obtain error. - my $error = $response->content; + # Return "1" - false. + return 1; + } - # Log error message. - &_log_to_syslog("Unable to download the ruleset. \($error\)"); + # Genarate and assign file name and path to store the downloaded rules file. + my $dl_rulesfile = &_get_dl_rulesfile($provider); - # Return "1" - false. - return 1; - } + # Check if a file name could be obtained. + unless ($dl_rulesfile) { + # Log error message. + &_log_to_syslog("Unable to store the downloaded rules file. "); - # Load perl stat module. - use File::stat; + # Delete downloaded temporary file. + unlink("$tmpfile"); - # Perform stat on the tmpfile. - my $stat = stat($tmpfile); + # Return "1" - false. + } - # Grab the local filesize of the downloaded tarball. - my $local_filesize = $stat->size; + # Load file copy module, which contains the move() function. + use File::Copy; - # Check if both file sizes match. - if (($remote_filesize) && ($remote_filesize ne $local_filesize)) { - # Log error message. - &_log_to_syslog("Unable to completely download the ruleset. "); - &_log_to_syslog("Only got $local_filesize Bytes instead of $remote_filesize Bytes. "); + # Overwrite the may existing rulefile or tarball with the downloaded one. + move("$tmpfile", "$dl_rulesfile"); # Delete temporary file. unlink("$tmpfile"); - # Return "1" - false. - return 1; + # Set correct ownership for the tarball. + set_ownership("$dl_rulesfile"); } - # Load file copy module, which contains the move() function. - use File::Copy; + # If we got here, everything worked fine. Return nothing. + return; +} + +# +## Function to extract a given ruleset. +# +sub extractruleset ($) { + my ($provider) = @_; - # Overwrite existing rules tarball with the new downloaded one. - move("$tmpfile", "$rulestarball"); + # Load perl module to deal with archives. 
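# A minimal sketch of the Archive::Tar calls used by extractruleset():
# list the members of a tarball and extract selected members to an explicit
# destination. The tarball path and destination directory are hypothetical.
use strict;
use warnings;
use Archive::Tar;
use File::Basename;

my $tarball = "/var/tmp/idsrules-exampleprovider.tar.gz";	# hypothetical
my $tar = Archive::Tar->new($tarball) or die "Could not read $tarball.\n";

# Loop over the member names inside the archive.
foreach my $packed_file ($tar->list_files) {
	# Only rules files are of interest in this sketch.
	next unless ($packed_file =~ m/\.rules$/);

	# Strip the path inside the archive and extract the member.
	my $file = fileparse($packed_file);
	$tar->extract_file($packed_file, "/tmp/ids_tmp/rules/$file");
}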
+ use Archive::Tar; - # Set correct ownership for the rulesdir and files. - set_ownership("$rulestarball"); + # Load perl module to deal with files and path. + use File::Basename; - # If we got here, everything worked fine. Return nothing. - return; + # Get full path and downloaded rulesfile for the given provider. + my $tarball = &_get_dl_rulesfile($provider); + + # Check if the file exists. + unless (-f $tarball) { + &_log_to_syslog("Could not extract ruleset file: $tarball"); + + # Return nothing. + return; + } + + # Check if the temporary directories exist, otherwise create them. + mkdir("$tmp_directory") unless (-d "$tmp_directory"); + mkdir("$tmp_rules_directory") unless (-d "$tmp_rules_directory"); + mkdir("$tmp_conf_directory") unless (-d "$tmp_conf_directory"); + + # Initialize the tar module. + my $tar = Archive::Tar->new($tarball); + + # Get the filelist inside the tarball. + my @packed_files = $tar->list_files; + + # Loop through the filelist. + foreach my $packed_file (@packed_files) { + my $destination; + + # Splitt the packed file into chunks. + my $file = fileparse($packed_file); + + # Handle msg-id.map file. + if ("$file" eq "sid-msg.map") { + # Set extract destination to temporary config_dir. + $destination = "$tmp_conf_directory/$provider\-sid-msg.map"; + # Handle classification.conf + } elsif ("$file" eq "classification.config") { + # Set extract destination to temporary config_dir. + $destination = "$tmp_conf_directory/$provider\-classification.config"; + # Handle rules files. + } elsif ($file =~ m/\.rules$/) { + my $rulesfilename; + + # Splitt the filename into chunks. + my @filename = split("-", $file); + + # Reverse the array. + @filename = reverse(@filename); + + # Get the amount of elements in the array. + my $elements = @filename; + + # Remove last element of the hash. + # It contains the vendor name, which will be replaced. + if ($elements >= 3) { + # Remove last element from hash. + pop(@filename); + } + + # Check if the last element of the filename does not + # contain the providers name. + if ($filename[-1] ne "$provider") { + # Add provider name as last element. + push(@filename, $provider); + } + + # Reverse the array back. + @filename = reverse(@filename); + + # Generate the name for the rulesfile. + $rulesfilename = join("-", @filename); + + # Set extract destination to temporaray rules_dir. + $destination = "$tmp_rules_directory/$rulesfilename"; + } else { + # Skip all other files. + next; + } + + # Extract the file to the temporary directory. + $tar->extract_file("$packed_file", "$destination"); + } } # -## A tiny wrapper function to call the oinkmaster script. +## A wrapper function to call the oinkmaster script, setup the rules structues and +## call the functions to merge the additional config files. (classification, sid-msg, etc.). # sub oinkmaster () { + # Load perl module for file copying. + use File::Copy; + # Check if the files in rulesdir have the correct permissions. &_check_rulesdir_permissions(); - # Cleanup the rules directory before filling it with the new rulest. + # Cleanup the rules directory before filling it with the new rulests. &_cleanup_rulesdir(); + # Get all enabled providers. + my @enabled_providers = &get_enabled_providers(); + + # Loop through the array of enabled providers. + foreach my $provider (@enabled_providers) { + # Omit the type (dl_type) of the stored ruleset. + my $type = $IDS::Ruleset::Providers{$provider}{'dl_type'}; + + # Handle the different ruleset types. 
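# A standalone rehearsal of the renaming scheme implemented in extractruleset()
# above, with a hypothetical provider handle and file name: the leading vendor
# chunk is dropped when the name consists of three or more dash-separated
# chunks, and the provider handle is prepended unless it already leads the
# name, so extracted rules files are grouped per provider.
use strict;
use warnings;

sub provider_rulesfilename {
	my ($provider, $file) = @_;

	# Split the file name into chunks and reverse the list.
	my @filename = reverse(split("-", $file));

	# Drop the vendor chunk if the name consists of three or more chunks.
	pop(@filename) if (@filename >= 3);

	# Make sure the provider handle becomes the leading chunk.
	push(@filename, $provider) if ($filename[-1] ne $provider);

	# Reverse back and join the chunks again.
	return join("-", reverse(@filename));
}

# Prints "etopen-exploit-kit.rules" for the hypothetical provider "etopen".
print provider_rulesfilename("etopen", "emerging-exploit-kit.rules"), "\n";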
+ if ($type eq "archive") { + # Call the extractruleset function. + &extractruleset($provider); + } elsif ($type eq "plain") { + # Generate filename and full path for the stored rulesfile. + my $dl_rulesfile = &_get_dl_rulesfile($provider); + + # Generate destination filename an full path. + my $destination = "$tmp_rules_directory/$provider\-ruleset.rules"; + + # Copy the file into the temporary rules directory. + copy($dl_rulesfile, $destination); + } else { + # Skip unknown type. + next; + } + } + # Load perl module to talk to the kernel syslog. use Sys::Syslog qw(:DEFAULT setlogsock); @@ -320,7 +600,7 @@ sub oinkmaster () { openlog('oinkmaster', 'cons,pid', 'user'); # Call oinkmaster to generate ruleset. - open(OINKMASTER, "/usr/local/bin/oinkmaster.pl -s -u file://$rulestarball -C $settingsdir/oinkmaster.conf -o $rulespath 2>&1 |") or die "Could not execute oinkmaster $!\n"; + open(OINKMASTER, "/usr/local/bin/oinkmaster.pl -s -u dir://$tmp_rules_directory -C $settingsdir/oinkmaster.conf -o $rulespath 2>&1 |") or die "Could not execute oinkmaster $!\n"; # Log output of oinkmaster to syslog. while() { @@ -337,6 +617,181 @@ sub oinkmaster () { # Close the log handle. closelog(); + + # Call function to merge the classification files. + &merge_classifications(@enabled_providers); + + # Call function to merge the sid to message mapping files. + &merge_sid_msg(@enabled_providers); + + # Cleanup temporary directory. + &cleanup_tmp_directory(); +} + +# +## Function to merge the classifications for a given amount of providers and write them +## to the classifications file. +# +sub merge_classifications(@) { + my @providers = @_; + + # Hash to store all collected classifications. + my %classifications = (); + + # Loop through the given array of providers. + foreach my $provider (@providers) { + # Generate full path to classification file. + my $classification_file = "$tmp_conf_directory/$provider\-classification.config"; + + # Skip provider if no classification file exists. + next unless (-f "$classification_file"); + + # Open the classification file. + open(CLASSIFICATION, $classification_file) or die "Could not open file $classification_file. $!\n"; + + # Loop through the file content. + while() { + # Parse the file and grab the classification details. + if ($_ =~/.*config classification\: (.*)/) { + # Split the grabbed details. + my ($short_name, $short_desc, $priority) = split("\,", $1); + + # Check if the grabbed classification is allready known and the priority value is greater + # than the stored one (which causes less priority in the IDS). + if (($classifications{$short_name}) && ($classifications{$short_name}[1] >= $priority)) { + #Change the priority value to the stricter one. + $classifications{$short_name} = [ "$classifications{$short_name}[0]", "$priority" ]; + } else { + # Add the classification to the hash. + $classifications{$short_name} = [ "$short_desc", "$priority" ]; + } + } + } + + # Close the file. + close(CLASSIFICATION); + } + + # Open classification file for writing. + open(FILE, ">", "$classification_file") or die "Could not write to $classification_file. $!\n"; + + # Print notice about autogenerated file. + print FILE "#Autogenerated file. Any custom changes will be overwritten!\n\n"; + + # Sort and loop through the hash of classifications. + foreach my $key (sort keys %classifications) { + # Assign some nice variable names for the items. 
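# A tiny standalone sketch of the line format parsed by merge_classifications():
# classification.config entries read
# "config classification: <name>,<description>,<priority>", and the capture
# group plus the comma split below mirror the parsing above. The sample line
# is hypothetical.
use strict;
use warnings;

my $line = "config classification: trojan-activity,A Network Trojan was Detected,1";

if ($line =~ /.*config classification\: (.*)/) {
	my ($short_name, $short_desc, $priority) = split("\,", $1);
	print "name=$short_name, description=$short_desc, priority=$priority\n";
}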
+ my $short_name = $key; + my $short_desc = $classifications{$key}[0]; + my $priority = $classifications{$key}[1]; + + # Write the classification to the file. + print FILE "config classification: $short_name,$short_desc,$priority\n"; + } + + # Close file handle. + close(FILE); +} + +# +## Function to merge the "sid to message mapping" files of various given providers. +# +sub merge_sid_msg (@) { + my @providers = @_; + + # Hash which contains all the sid to message mappings. + my %mappings = (); + + # Loop through the array of given providers. + foreach my $provider (@providers) { + # Generate full path and filename. + my $sid_msg_file = "$tmp_conf_directory/$provider\-sid-msg.map"; + + # Skip provider if no sid to msg mapping file for this provider exists. + next unless (-f $sid_msg_file); + + # Open the file. + open(MAPPING, $sid_msg_file) or die "Could not open $sid_msg_file. $!\n"; + + # Loop through the file content. + while () { + # Remove newlines. + chomp($_); + + # Skip lines which do not start with a number, + next unless ($_ =~ /^\d+/); + + # Split line content and assign it to an array. + my @line = split(/ \|\| /, $_); + + # Grab the first element (and remove it) from the line array. + # It contains the sid. + my $sid = shift(@line); + + # Store the grabbed sid and the remain array as hash value. + # It still contains the messages, references etc. + $mappings{$sid} = [@line]; + } + + # Close file handle. + close(MAPPING); + } + + # Open mappings file for writing. + open(FILE, ">", $sid_msg_file) or die "Could not write $sid_msg_file. $!\n"; + + # Write notice about autogenerated file. + print FILE "#Autogenerated file. Any custom changes will be overwritten!\n\n"; + + # Loop through the hash of mappings. + foreach my $sid ( sort keys %mappings) { + # Grab data for the sid. + my @data = @{$mappings{$sid}}; + + # Add the sid to the data array. + unshift(@data, $sid); + + # Generate line. + my $line = join(" \|\| ", @data); + + print FILE "$line\n"; + + } + + # Close file handle. + close(FILE); +} + +# +## A very tiny function to move an extracted ruleset from the temporary directory into +## the rules directory. +# +sub move_tmp_ruleset() { + # Load perl module. + use File::Copy; + + # Do a directory listing of the temporary directory. + opendir DH, $tmp_rules_directory; + + # Loop over all files. + while(my $file = readdir DH) { + # Move them to the rules directory. + move "$tmp_rules_directory/$file" , "$rulespath/$file"; + } + + # Close directory handle. + closedir DH; +} + +# +## Function to cleanup the temporary IDS directroy. +# +sub cleanup_tmp_directory () { + # Load rmtree() function from file path perl module. + use File::Path 'rmtree'; + + # Delete temporary directory and all containing files. + rmtree([ "$tmp_directory" ]); } # @@ -401,39 +856,44 @@ sub _store_error_message ($) { } # -## Function to get a list of all available network zones. +## Private function to get the path and filename for a downloaded ruleset by a given provider. # -sub get_available_network_zones () { - # Get netsettings. - my %netsettings = (); - &General::readhash("${General::swroot}/ethernet/settings", \%netsettings); +sub _get_dl_rulesfile($) { + my ($provider) = @_; - # Obtain the configuration type from the netsettings hash. - my $config_type = $netsettings{'CONFIG_TYPE'}; + # Gather the download type for the given provider. 
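# A brief standalone sketch of the sid-msg.map format handled by merge_sid_msg()
# above: each line reads "sid || message || reference ...", so splitting on
# " || " and shifting off the first field yields the sid and the remaining
# fields. The sample line is hypothetical.
use strict;
use warnings;

my %mappings = ();
my $line = "2100498 || GPL ATTACK_RESPONSE id check returned root || url,docs.example.org/2100498";

# Only lines starting with a number carry a mapping.
if ($line =~ /^\d+/) {
	my @fields = split(/ \|\| /, $line);

	# The first field is the sid, the rest are message and references.
	my $sid = shift(@fields);
	$mappings{$sid} = [ @fields ];
}

# Re-assemble the line the same way merge_sid_msg() writes it back out.
foreach my $sid (sort keys %mappings) {
	print join(" || ", $sid, @{ $mappings{$sid} }), "\n";
}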
+ my $dl_type = $IDS::Ruleset::Providers{$provider}{'dl_type'}; - # Hash which contains the conversation from the config mode - # to the existing network interface names. They are stored like - # an array. - # - # Mode "0" red is a modem and green - # Mode "1" red is a netdev and green - # Mode "2" red, green and orange - # Mode "3" red, green and blue - # Mode "4" red, green, blue, orange - my %config_type_to_interfaces = ( - "0" => [ "red", "green" ], - "1" => [ "red", "green" ], - "2" => [ "red", "green", "orange" ], - "3" => [ "red", "green", "blue" ], - "4" => [ "red", "green", "blue", "orange" ] - ); - - # Obtain and dereference the corresponding network interaces based on the read - # network config type. - my @network_zones = @{ $config_type_to_interfaces{$config_type} }; - - # Return them. - return @network_zones; + # Obtain the file suffix for the download file type. + my $suffix = $dl_type_to_suffix{$dl_type}; + + # Check if a suffix has been found. + unless ($suffix) { + # Abort return - nothing. + return; + } + + # Generate the full filename and path for the stored rules file. + my $rulesfile = "$dl_rules_path/$dl_rulesfile_prefix-$provider$suffix"; + + # Return the generated filename. + return $rulesfile; +} + +# +## Tiny function to delete the stored ruleset file or tarball for a given provider. +# +sub drop_dl_rulesfile ($) { + my ($provider) = @_; + + # Gather the full path and name of the stored rulesfile. + my $rulesfile = &_get_dl_rulesfile($provider); + + # Check if the given rulesfile exists. + if (-f $rulesfile) { + # Delete the stored rulesfile. + unlink($rulesfile) or die "Could not delete $rulesfile. $!\n"; + } } # @@ -488,7 +948,7 @@ sub call_suricatactrl ($) { # Call the suricatactrl binary and pass the "cron" command # with the requrested interval. - system("$suricatactrl $option $interval &>/dev/null"); + &General::system("$suricatactrl", "$option", "$interval"); # Return "1" - True. return 1; @@ -500,7 +960,7 @@ sub call_suricatactrl ($) { } else { # Call the suricatactrl binary and pass the requrested # option to it. - system("$suricatactrl $option &>/dev/null"); + &General::system("$suricatactrl", "$option"); # Return "1" - True. return 1; @@ -575,12 +1035,12 @@ sub _cleanup_rulesdir() { # We only want files. next unless (-f "$rulespath/$file"); - # Skip element if it has config as file extension. - next if ($file =~ m/\.config$/); - # Skip rules file for whitelisted hosts. next if ("$rulespath/$file" eq $whitelist_file); + # Skip rules file with local rules. + next if ("$rulespath/$file" eq $local_rules_file); + # Delete the current processed file, if not, exit this function # and return an error message. unlink("$rulespath/$file") or return "Could not delete $rulespath/$file. $!\n"; @@ -600,7 +1060,7 @@ sub generate_home_net_file() { &General::readhash("${General::swroot}/ethernet/settings", \%netsettings); # Get available network zones. - my @network_zones = &get_available_network_zones(); + my @network_zones = &Network::get_available_network_zones(); # Temporary array to store network address and prefix of the configured # networks. @@ -664,22 +1124,7 @@ sub generate_home_net_file() { } # Format home net declaration. - my $line = "\"\["; - - # Loop through the array of networks. - foreach my $network (@networks) { - # Add the network to the line. - $line = "$line" . "$network"; - - # Check if the current network was the last in the array. - if ($network eq $networks[-1]) { - # Close the line. - $line = "$line" . 
"\]\""; - } else { - # Add "," for the next network. - $line = "$line" . "\,"; - } - } + my $line = "\"[" . join(',', @networks) . "]\""; # Open file to store the addresses of the home net. open(FILE, ">$homenet_file") or die "Could not open $homenet_file. $!\n"; @@ -702,44 +1147,33 @@ sub generate_home_net_file() { # Function to generate and write the file which contains the configured and used DNS servers. # sub generate_dns_servers_file() { - # Open file which contains the current used DNS configuration. - open (FILE, "${General::swroot}/red/dns") or die "Could not read DNS configuration from ${General::swroot}/red/dns. $!\n"; - - # Read-in whole file content and store it in a temporary array. - my @file_content = split(' ', ); + # Get the used DNS servers. + my @nameservers = &General::get_nameservers(); - # Close file handle. - close(FILE); + # Get network settings. + my %netsettings; + &General::readhash("${General::swroot}/ethernet/settings", \%netsettings); # Format dns servers declaration. - my $line = "\"\["; - - # Check if the current DNS configuration is using the local recursor mode. - if ($file_content[0] eq "local" && $file_content[1] eq "recursor") { - # The responsible DNS servers on red are directly used, and because we are not able - # to specify each single DNS server address here, we currently have to thread each - # address which is not part of the HOME_NET as possible DNS server. - $line = "$line" . "!\$HOME_NET"; - - } else { - # Loop through the array which contains the file content. - foreach my $server (@file_content) { - # Remove newlines. - chomp($server); + my $line = ""; - # Add the DNS server to the line. - $line = "$line" . "$server"; + # Check if the system has configured nameservers. + if (@nameservers) { + # Add the GREEN address as DNS servers. + push(@nameservers, $netsettings{'GREEN_ADDRESS'}); - # Check if the current DNS server was the last in the array. - if ($server ne $file_content[-1]) { - # Add "," for the next DNS server. - $line = "$line" . "\,"; - } + # Check if a BLUE zone exists. + if ($netsettings{'BLUE_ADDRESS'}) { + # Add the BLUE address to the array of nameservers. + push(@nameservers, $netsettings{'BLUE_ADDRESS'}); } - } - # Close the line... - $line = "$line" . "\]\""; + # Generate the line which will be written to the DNS servers file. + $line = join(",", @nameservers); + } else { + # External net simply contains (any). + $line = "\$EXTERNAL_NET"; + } # Open file to store the used DNS server addresses. open(FILE, ">$dns_servers_file") or die "Could not open $dns_servers_file. $!\n"; @@ -752,20 +1186,69 @@ sub generate_dns_servers_file() { print FILE "#Autogenerated file. Any custom changes will be overwritten!\n"; # Print the generated DNS declaration to the file. - print FILE "DNS_SERVERS:\t$line\n"; + print FILE "DNS_SERVERS:\t\"[$line]\"\n"; # Close file handle. close(FILE); } # -## Function to generate and write the file for used rulefiles. +# Function to generate and write the file which contains the HTTP_PORTS definition. +# +sub generate_http_ports_file() { + my %proxysettings; + + # Read-in proxy settings + &General::readhash("${General::swroot}/proxy/advanced/settings", \%proxysettings); + + # Check if the proxy is enabled. + if (( -e "${General::swroot}/proxy/enable") || (-e "${General::swroot}/proxy/enable_blue")) { + # Add the proxy port to the array of HTTP ports. + push(@http_ports, $proxysettings{'PROXY_PORT'}); + } + + # Check if the transparent mode of the proxy is enabled. 
+ if ((-e "${General::swroot}/proxy/transparent") || (-e "${General::swroot}/proxy/transparent_blue")) { + # Add the transparent proxy port to the array of HTTP ports. + push(@http_ports, $proxysettings{'TRANSPARENT_PORT'}); + } + + # Format HTTP_PORTS declaration. + my $line = ""; + + # Generate line which will be written to the http ports file. + $line = join(",", @http_ports); + + # Open file to store the HTTP_PORTS. + open(FILE, ">$http_ports_file") or die "Could not open $http_ports_file. $!\n"; + + # Print yaml header. + print FILE "%YAML 1.1\n"; + print FILE "---\n\n"; + + # Print notice about autogenerated file. + print FILE "#Autogenerated file. Any custom changes will be overwritten!\n"; + + # Print the generated HTTP_PORTS declaration to the file. + print FILE "HTTP_PORTS:\t\"[$line]\"\n"; + + # Close file handle. + close(FILE); +} + +# +## Function to generate and write the file for used rulefiles file for a given provider. +## +## The function requires as first argument a provider handle, and as second an array with files. # -sub write_used_rulefiles_file(@) { - my @files = @_; +sub write_used_provider_rulefiles_file($@) { + my ($provider, @files) = @_; + + # Get the path and file for the provider specific used rulefiles file. + my $used_provider_rulesfile_file = &get_used_provider_rulesfile_file($provider); # Open file for used rulefiles. - open (FILE, ">$used_rulefiles_file") or die "Could not write to $used_rulefiles_file. $!\n"; + open (FILE, ">$used_provider_rulesfile_file") or die "Could not write to $used_provider_rulesfile_file. $!\n"; # Write yaml header to the file. print FILE "%YAML 1.1\n"; @@ -774,9 +1257,6 @@ sub write_used_rulefiles_file(@) { # Write header to file. print FILE "#Autogenerated file. Any custom changes will be overwritten!\n"; - # Allways use the whitelist. - print FILE " - whitelist.rules\n"; - # Loop through the array of given files. foreach my $file (@files) { # Check if the given filename exists and write it to the file of used rulefiles. @@ -789,18 +1269,86 @@ sub write_used_rulefiles_file(@) { close(FILE); } +# +## Function to write the main file for provider rulesfiles inclusions. +## +## This function requires an array of provider handles. +# +sub write_main_used_rulefiles_file (@) { + my (@providers) = @_; + + # Call function to write the static rulefiles file. + &_write_static_rulefiles_file(); + + # Open file for used rulefils inclusion. + open (FILE, ">", "$suricata_used_providers_file") or die "Could not write to $suricata_used_providers_file. $!\n"; + + # Write yaml header to the file. + print FILE "%YAML 1.1\n"; + print FILE "---\n\n"; + + # Write header to file. + print FILE "#Autogenerated file. Any custom changes will be overwritten!\n"; + + # Loop through the list of given providers. + foreach my $provider (@providers) { + # Call function to get the providers used rulefiles file. + my $filename = &get_used_provider_rulesfile_file($provider); + + # Print the provider to the file. + print FILE "include\: $filename\n"; + } + + # Always include the file which hold the static includes. + print FILE "include\: $suricata_static_rulefiles_file\n"; + + # Close the filehandle after writing. + close(FILE); +} + +sub _write_static_rulefiles_file () { + # Open file. + open (FILE, ">", $suricata_static_rulefiles_file) or die "Could not write to $suricata_static_rulefiles_file. $!\n"; + + # Write yaml header to the file. + print FILE "%YAML 1.1\n"; + print FILE "---\n\n"; + + # Write notice about autogenerated file. 
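# A short standalone sketch of the include structure produced by
# write_main_used_rulefiles_file(): one "include:" line per enabled provider,
# each pointing at that provider's own used-rulefiles file (named as in
# get_used_provider_rulesfile_file()), followed by the static includes file.
# The provider handles are hypothetical and the swroot path is assumed to be
# /var/ipfire.
use strict;
use warnings;

my $settingsdir = "/var/ipfire/suricata";
my @enabled_providers = ( "providerone", "providertwo" );	# hypothetical

foreach my $provider (@enabled_providers) {
	print "include: $settingsdir/suricata-$provider-used-rulefiles.yaml\n";
}
print "include: $settingsdir/suricata-static-included-rulefiles.yaml\n";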
+ print FILE "#Autogenerated file. Any custom changes will be overwritten!\n"; + + # Loop through the array of static included rulesfiles. + foreach my $file (@static_included_rulefiles) { + # Check if the file exists. + if (-f "$rulespath/$file") { + # Write the rulesfile name to the file. + print FILE " - $file\n"; + } + } + + # Close the file handle + close(FILE); +} + +# +## Tiny function to generate the full path and name for the used_provider_rulesfile file of a given provider. +# +sub get_used_provider_rulesfile_file ($) { + my ($provider) = @_; + + my $filename = "$settingsdir/suricata\-$provider\-used\-rulefiles.yaml"; + + # Return the gernerated file. + return $filename; +} + # ## Function to generate and write the file for modify the ruleset. # sub write_modify_sids_file() { # Get configured settings. my %idssettings=(); - my %rulessettings=(); &General::readhash("$ids_settings_file", \%idssettings); - &General::readhash("$rules_settings_file", \%rulessettings); - - # Gather the configured ruleset. - my $ruleset = $rulessettings{'RULES'}; # Open modify sid's file for writing. open(FILE, ">$modify_sids_file") or die "Could not write to $modify_sids_file. $!\n"; @@ -817,38 +1365,58 @@ sub write_modify_sids_file() { # malware in that file. Rules which fall into the first category should stay as # alert since not all flows of that type contain malware. - if($ruleset eq 'registered' or $ruleset eq 'subscripted' or $ruleset eq 'community') { - # These types of rulesfiles contain meta-data which gives the action that should - # be used when in IPS mode. Do the following: - # - # 1. Disable all rules and set the action to 'drop' - # 2. Set the action back to 'alert' if the rule contains 'flowbits:noalert;' - # This should give rules not in the policy a reasonable default if the user - # manually enables them. - # 3. Enable rules and set actions according to the meta-data strings. + # These types of rulesfiles contain meta-data which gives the action that should + # be used when in IPS mode. Do the following: + # + # 1. Disable all rules and set the action to 'drop' + # 2. Set the action back to 'alert' if the rule contains 'flowbits:noalert;' + # This should give rules not in the policy a reasonable default if the user + # manually enables them. + # 3. Enable rules and set actions according to the meta-data strings. - my $policy = 'balanced'; # Placeholder to allow policy to be changed. + my $policy = 'balanced'; # Placeholder to allow policy to be changed. print FILE <mtime; + + # Convert into human read-able format. + my $date = strftime('%Y-%m-%d %H:%M:%S', localtime($mtime)); + + # Return the date. + return $date; +} + # ## Function to gather the version of suricata. # @@ -1055,6 +1623,53 @@ sub get_red_address() { return; } +# +## Function to get the used rules files of a given provider. +# +sub read_used_provider_rulesfiles($) { + my ($provider) = @_; + + # Array to store the used rulefiles. + my @used_rulesfiles = (); + + # Get the used rulesefile file for the provider. + my $rulesfile_file = &get_used_provider_rulesfile_file($provider); + + # Check if the used rulesfile is empty. + unless (-z $rulesfile_file) { + # Open the file or used rulefiles and read-in content. + open(FILE, $rulesfile_file) or die "Could not open $rulesfile_file. $!\n"; + + while () { + # Assign the current line to a nice variable. + my $line = $_; + + # Remove newlines. + chomp($line); + + # Skip comments. + next if ($line =~ /\#/); + + # Skip blank lines. 
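# A tiny standalone sketch of the timestamp formatting used by get_ruleset_date()
# above: stat() the stored rules file, take its mtime and render it with
# strftime(). The file path is hypothetical.
use strict;
use warnings;
use File::stat;
use POSIX qw(strftime);

my $file = "/var/tmp/idsrules-exampleprovider.tar.gz";	# hypothetical

if (-f $file) {
	# Grab the modification time and convert it into a human readable format.
	my $mtime = stat($file)->mtime;
	print strftime('%Y-%m-%d %H:%M:%S', localtime($mtime)), "\n";
}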
+ next if ($line =~ /^\s*$/); + + # Gather the rulefile. + if ($line =~ /.*- (.*)/) { + my $rulefile = $1; + + # Add the rulefile to the array of used rulesfiles. + push(@used_rulesfiles, $rulefile); + } + } + + # Close the file. + close(FILE); + } + + # Return the array of used rulesfiles. + return @used_rulesfiles; +} + # ## Function to write the lock file for locking the WUI, while ## the autoupdate script runs.
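# A small standalone sketch of the list format parsed by
# read_used_provider_rulesfiles() above: rule file names are stored as YAML
# sequence entries (" - file.rules"), comment and blank lines are skipped and
# the capture group grabs the bare file name. The sample lines are hypothetical.
use strict;
use warnings;

my @lines = (
	"#Autogenerated file. Any custom changes will be overwritten!",
	"",
	" - exampleprovider-emerging-exploit.rules",
	" - exampleprovider-emerging-scan.rules",
);

my @used_rulesfiles = ();
foreach my $line (@lines) {
	# Remove newlines.
	chomp($line);

	# Skip comments and blank lines, as the function above does.
	next if ($line =~ /\#/);
	next if ($line =~ /^\s*$/);

	# Grab the rule file name from the sequence entry.
	if ($line =~ /.*- (.*)/) {
		push(@used_rulesfiles, $1);
	}
}

# Prints the two rules file names.
print "$_\n" foreach (@used_rulesfiles);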