+++ /dev/null
-#!/usr/bin/python3
-###############################################################################
-# #
-# IPFire.org - A linux based firewall #
-# Copyright (C) 2025 Michael Tremer #
-# #
-# This program is free software: you can redistribute it and/or modify #
-# it under the terms of the GNU General Public License as published by #
-# the Free Software Foundation, either version 3 of the License, or #
-# (at your option) any later version. #
-# #
-# This program is distributed in the hope that it will be useful, #
-# but WITHOUT ANY WARRANTY; without even the implied warranty of #
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
-# GNU General Public License for more details. #
-# #
-# You should have received a copy of the GNU General Public License #
-# along with this program. If not, see <http://www.gnu.org/licenses/>. #
-# #
-###############################################################################
-
-import argparse
-import calendar
-import collections
-import datetime
-import email.message
-import email.utils
-import logging
-import reportlab
-import reportlab.lib.colors
-import reportlab.lib.enums
-import reportlab.lib.pagesizes
-import reportlab.lib.styles
-import reportlab.platypus
-import socket
-import sqlite3
-import subprocess
-import tempfile
-
-from reportlab.lib.units import cm, mm
-
-log = logging.getLogger("suricata-report-generator")
-log.setLevel(logging.DEBUG)
-
-# i18n
-_ = lambda x: x
-
-def row_factory(cursor, row):
- """
- This is a custom row factory that makes all fields accessible as attributes.
- """
- # Create a new class with all fields
- cls = collections.namedtuple("Row", [column for column, *args in cursor.description])
-
- # Parse the row data
- return cls._make(row)
-
-class ReportGenerator(object):
- """
-	This is the main class that generates PDF reports from the collected alerts.
- """
- def __init__(self, path):
- self.path = path
-
- # Open the database
- self.db = sqlite3.connect(path)
- self.db.row_factory = row_factory
-
- # Load a default stylesheet for our document
- self.styles = reportlab.lib.styles.getSampleStyleSheet()
-
- # Allow to center content
- centered = reportlab.lib.styles.ParagraphStyle(
- name = "Centered",
- parent = self.styles["Normal"],
- alignment = reportlab.lib.enums.TA_CENTER,
- )
- self.styles.add(centered)
-
- def generate(self, output, year, month, week, day):
- """
- Generates a PDF report.
- """
- log.debug("Generating report %s..." % output)
-
- today = datetime.date.today()
-
- # Daily reports
- if year and month and day:
- try:
- date = datetime.date(year, month, day)
- except ValueError as e:
- log.error("Invalid date: %s-%s-%s" % (year, month, day))
- raise SystemExit(2)
-
- # Start and end date are the same day
- date_start = date_end = date
-
- # Monthly reports
- elif year and month:
- date_start = datetime.date(year, month, 1)
-
- # Determine the last day
- first_weekday, last_day = calendar.monthrange(year, month)
- date_end = datetime.date(year, month, last_day)
-
- # Cap to today
- date_end = min(date_end, today)
-
- # Weekly reports
- elif year and week:
- date_start = datetime.date.fromisocalendar(year, week, 1)
- date_end = datetime.date.fromisocalendar(year, week, 7)
-
- # Cap to today
- date_end = min(date_end, today)
-
- # Yearly reports
- elif year:
- date_start = datetime.date(year, 1, 1)
- date_end = datetime.date(year, 12, 31)
-
- # Cap to today
- date_end = min(date_end, today)
-
- # Log the dates
- log.debug(" Dates: %s - %s" % (date_start, date_end))
-
- # Create a new PDF document
- doc = reportlab.platypus.SimpleDocTemplate(
- output, pagesize=reportlab.lib.pagesizes.A4,
-
- # Decrease the margins
- leftMargin=5 * mm, rightMargin=5 * mm, topMargin=10 * mm, bottomMargin=15 * mm,
- )
-
- # Collect everything that should go on the document
- elements = []
-
- # Create the title page
- self._make_titlepage(elements, date_start, date_end)
-
- # Add detailed alerts
- self._make_alerts(elements, date_start, date_end, width=doc.width)
-
- # Render the document
- doc.build(elements, onLaterPages=self._make_page_number)
-
- def _make_page_number(self, canvas, doc):
- # Fetch the current page number
- number = canvas.getPageNumber()
-
- # Set the font
- canvas.setFont(self.styles["Normal"].fontName, 9)
-
- # Write the page number to the right hand bottom
- canvas.drawRightString(200 * mm, 10 * mm, _("Page %s") % number)
-
- def _make_titlepage(self, elements, date_start, date_end):
- """
- Generates the title page of the report
- """
- date_format = "%d %B %Y"
-
- # What time is it right now?
- now = datetime.datetime.now()
-
- # Leave some space at the top
- elements.append(reportlab.platypus.Spacer(1, 6 * cm))
-
- # Show the title
- elements.append(reportlab.platypus.Paragraph(
- _("IPFire Intrusion Prevention Alert Report"), self.styles["Title"],
- ))
-
- # Only show one date if this is a daily report
- if date_start == date_end:
- elements.append(
- reportlab.platypus.Paragraph(
- date_start.strftime(date_format),
- self.styles["Centered"],
- ),
- )
-
- # Otherwise show the date range
- else:
- elements.append(
- reportlab.platypus.Paragraph(
- "%s - %s" % (
- date_start.strftime(date_format),
- date_end.strftime(date_format)
- ),
- self.styles["Centered"],
- ),
- )
-
- # Leave some extra space
- elements.append(reportlab.platypus.Spacer(1, 1 * cm))
-
- # Show when this report was generated and on which host
- elements.append(
- reportlab.platypus.Paragraph(
- _("Generated on %(hostname)s on %(when)s.") % {
- "hostname" : socket.gethostname(),
- "when" : now.strftime("%d %B %Y %H:%M"),
- },
- self.styles["Centered"],
- ),
- )
-
- # End the page
- elements.append(
- reportlab.platypus.PageBreak(),
- )
-
- def _make_alerts(self, elements, date_start, date_end, **kwargs):
- """
- Called to add all alerts in the date range with all their detail.
- """
- date = date_start
-
- while date <= date_end:
- self._make_alerts_by_date(elements, date, **kwargs)
-
- # Move on to the next day
- date += datetime.timedelta(days=1)
-
- def _make_alerts_by_date(self, elements, date, *, width):
- log.debug("Rendering alerts for %s..." % date)
-
- # Fetch the alerts
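-		# Note: the JSON ->> operators in this query require SQLite 3.38 or later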
- c = self.db.execute("""
- SELECT
- id,
- datetime(timestamp, 'unixepoch', 'localtime') AS timestamp,
-
- -- Basic Stuff
- (event ->> '$.src_ip') AS source_address,
- (event ->> '$.src_port') AS source_port,
- (event ->> '$.dest_ip') AS destination_address,
- (event ->> '$.dest_port') AS destination_port,
- (event ->> '$.proto') AS protocol,
- (event ->> '$.icmp_code') AS icmp_code,
- (event ->> '$.icmp_type') AS icmp_type,
-
- -- Alert Stuff
- (event ->> '$.alert.category') AS alert_category,
- (event ->> '$.alert.signature') AS alert_signature,
- (event ->> '$.alert.signature_id') AS alert_signature_id,
- (event ->> '$.alert.severity') AS alert_severity,
- (event ->> '$.alert.action') AS alert_action,
- (event ->> '$.alert.gid') AS alert_gid,
- (event ->> '$.alert.rev') AS alert_rev
- FROM
- alerts
- WHERE
- date(timestamp, 'unixepoch', 'localtime') = ?
- ORDER BY
- timestamp ASC,
- id ASC
- """, (date.isoformat(),))
-
- # Start the table with the header
- rows = [
- (_("Time"), _("Signature"), _("Protocol"), _("Source / Destination"))
- ]
-
- while True:
- row = c.fetchone()
- if row is None:
- break
-
- # Parse the timestamp
- t = datetime.datetime.strptime(row.timestamp, "%Y-%m-%d %H:%M:%S")
-
- # Append the row
- rows.append((
- t.strftime("%H:%M:%S"),
- "%s %s\n[%s:%s:%s] - %s" % (
- "*" * row.alert_severity,
- row.alert_signature,
- row.alert_gid,
- row.alert_signature_id,
- row.alert_rev,
- row.alert_category,
- ),
- row.protocol,
- "%s:%s\n%s:%s" % (
- row.source_address, (row.source_port or row.icmp_code),
- row.destination_address, (row.destination_port or row.icmp_type),
- ),
- ))
-
- # Skip if we have found no data
- if len(rows) == 1:
- log.debug("Skipping %s, because we don't have any data" % date)
- return
-
- # Add a headline
- elements.append(
- reportlab.platypus.Paragraph(
- _("Alerts from %s") % date.strftime("%A, %d %B %Y"),
- self.styles["Heading2"],
- )
- )
-
- # Create the table
- table = reportlab.platypus.Table(rows,
- # Set the widths of the rows
- colWidths=(
- width * 0.1, width * 0.6, width * 0.1, width * 0.2,
- ),
-
- # Repeat the header after a page break
- repeatRows=1,
- )
-
- # Style the table
- table.setStyle(
- reportlab.platypus.TableStyle((
- # Make the grid slightly grey
- ("GRID", (0, 0), (-1, -1), 0.25, reportlab.lib.colors.grey),
-
- # Align all content to the top left corners of the cells
- ("ALIGN", (0, 0), (-1, -1), "LEFT"),
- ("ALIGN", (0, 0), (0, -1), "CENTER"),
- ("ALIGN", (2, 0), (2, -1), "CENTER"),
- ("VALIGN", (0, 0), (-1, -1), "TOP"),
-
-				# Choose a much smaller font size
- ("FONTSIZE", (0, 0), (-1, -1), 8),
-
- # Alternate the background colours of the rows
- ("ROWBACKGROUNDS", (0, 1), (-1, -1), [
- reportlab.lib.colors.white,
- reportlab.lib.colors.lightgrey,
- ]),
- )),
- )
-
- # Append the table to the output
- elements.append(table)
-
- # End the page
- elements.append(
- reportlab.platypus.PageBreak(),
- )
-
- def email(self, recipients, sender, **kwargs):
- """
- Generates an email with the report
- """
- log.debug("Sending an email from %s to %s" % (sender, recipients))
-
- # Fetch the hostname
- hostname = socket.gethostname()
-
- # Create a new message
- msg = email.message.EmailMessage()
-
- # Set the sender
- msg.add_header("From", sender)
-
- # Add them to the email
- msg.add_header("To", ", ".join(recipients))
-
- # Set the Subject
- msg.add_header(
- "Subject", "[REPORT] Intrusion Prevention System Alerts from %s" % hostname,
-		)
-
- # Compose the content
- content = [
- _("To whom it may concern,"),
- "",
-			_("The IPFire Intrusion Prevention System is sending you the attached report."),
- ]
-
- # Add the content to the email
- msg.set_content("\n".join(content))
-
- # Generate the report & attach it to the email
- with tempfile.NamedTemporaryFile() as f:
- # Generate
- self.generate(output=f.name, **kwargs)
-
- # Attach
- msg.add_attachment(
- f.read(), maintype="application", subtype="pdf", filename="report.pdf",
- )
-
- # Show the email
- log.debug(msg.as_string())
-
- # Send the email
- p = subprocess.Popen(
- ["/usr/sbin/sendmail", "-t", "-oi", "-f", sender],
- text=True,
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- )
-
- # Pipe the email into sendmail
- stdout, stderr = p.communicate(msg.as_string())
-
-		if p.returncode != 0:
-			log.error("Failed to send email. sendmail returned %s:" % p.returncode)
-			if stdout:
-				log.error(stdout)
-			return
-
-		log.debug("Successfully sent email to %s" % ", ".join(recipients))
-
-
-def setup_logging(loglevel=logging.INFO):
- log.setLevel(loglevel)
-
- # Write everything to the console
- handler = logging.StreamHandler()
- log.addHandler(handler)
-
- handler.setLevel(loglevel)
-
- return log
-
-def main():
-	parser = argparse.ArgumentParser(description="Report Generator for Suricata")
-
- # Command Line Arguments
- parser.add_argument("--verbose", "-v", action="count", help="Be more verbose")
- parser.add_argument("--database", help="Database",
- default="/var/log/suricata/reporter.db")
-
- # Require some output parameters
- group = parser.add_mutually_exclusive_group(required=True)
- group.add_argument("--output", "-o", help=_("Output Path"))
-	group.add_argument("--email-recipient", nargs="+", dest="recipients",
-		help=_("Send the report to these recipients (multiple possible)")
-	)
-
- parser.add_argument("--email-sender", dest="sender", help=_("Email Sender"))
-
- # Select the time
- parser.add_argument("--year", type=int, required=True,
- help=_("Year of the report (e.g. 2025)"))
- parser.add_argument("--month", type=int, choices=range(1, 13),
- help=_("Month of the report (1-12)"))
-
- # We can only use --week or --day, but never both
- group = parser.add_mutually_exclusive_group()
- group.add_argument("--day", type=int, choices=range(1, 32),
- help=_("Day of the month (1-31)"))
- group.add_argument("--week", type=int, choices=range(1, 54),
- help=_("ISO calendar week number (1-53)"))
-
- # Parse command line arguments
- args = parser.parse_args()
-
- # Check if we have an email sender
- if args.recipients and not args.sender:
- parser.error("--email-sender= is required if recipients have been passed")
-
- # Setup logging
- loglevel = logging.WARN
-
- if args.verbose:
- if args.verbose == 1:
- loglevel = logging.INFO
- elif args.verbose >= 2:
- loglevel = logging.DEBUG
-
- setup_logging(loglevel=loglevel)
-
- # Create the report
- generator = ReportGenerator(args.database)
-
- # Generate!
- if args.output:
- generator.generate(
- output = args.output,
- year = args.year,
- month = args.month,
- week = args.week,
- day = args.day,
- )
-
- # Email!
- elif args.recipients:
- generator.email(
- recipients = args.recipients,
- sender = args.sender,
- year = args.year,
- month = args.month,
- week = args.week,
- day = args.day,
- )
-
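-# Example invocations (illustrative; the script name is a placeholder, the
-# flags match the argument parser above):
-#
-#   Write a monthly report to a file:
-#     ./suricata-report-generator.py --year 2025 --month 1 -o /tmp/report.pdf
-#
-#   Email a weekly report instead:
-#     ./suricata-report-generator.py --year 2025 --week 3 \
-#       --email-recipient admin@example.com --email-sender ips@example.com
-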
-if __name__ == "__main__":
- main()
+++ /dev/null
-#!/usr/bin/python3
-###############################################################################
-# #
-# IPFire.org - A linux based firewall #
-# Copyright (C) 2025 Michael Tremer #
-# #
-# This program is free software: you can redistribute it and/or modify #
-# it under the terms of the GNU General Public License as published by #
-# the Free Software Foundation, either version 3 of the License, or #
-# (at your option) any later version. #
-# #
-# This program is distributed in the hope that it will be useful, #
-# but WITHOUT ANY WARRANTY; without even the implied warranty of #
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
-# GNU General Public License for more details. #
-# #
-# You should have received a copy of the GNU General Public License #
-# along with this program. If not, see <http://www.gnu.org/licenses/>. #
-# #
-###############################################################################
-
-import argparse
-import asyncio
-import configparser
-import datetime
-import email.message
-import email.utils
-import grp
-import json
-import logging
-import logging.handlers
-import multiprocessing
-import os
-import pwd
-import queue
-import signal
-import socket
-import sqlite3
-import subprocess
-import sys
-
-# Fetch the hostname
-HOSTNAME = socket.gethostname()
-
-# Email Settings
-EMAIL_FROM = "IPFire Intrusion Prevention System <%s>"
-
-log = logging.getLogger("suricata-reporter")
-log.setLevel(logging.DEBUG)
-
-# i18n
-_ = lambda x: x
-
-class Reporter(object):
- """
-	This is the main class that receives events from Suricata and dispatches them to the workers.
- """
- def __init__(self, config_path):
- self.config_path = config_path
-
- # Parse the configuration file
- self.config = self.read_config()
-
- # Fetch the current event loop
- self.loop = asyncio.get_running_loop()
-
- # Have we terminated?
- self.is_terminated = asyncio.Event()
-
- # Create an events queue
- self.queue = multiprocessing.Queue(1024)
-
- # Keep references to our workers
- self.workers = []
-
- # Register any signals
- for signo in (signal.SIGINT, signal.SIGTERM):
- self.loop.add_signal_handler(signo, self.terminate)
-
- # Reload the configuration on SIGHUP
- self.loop.add_signal_handler(signal.SIGHUP, self.reload)
-
- # Create the socket
- self.sock = self._create_socket()
-
- def read_config(self):
- """
- Reads or re-reads the configuration.
- """
- config = configparser.ConfigParser()
- config.read(self.config_path)
-
- return config
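-
-	# The configuration is a plain INI-style file. An illustrative example
-	# (the keys match the lookups in this module; socket, user, group and
-	# database show the built-in fallbacks, the remaining values are made up):
-	#
-	#   [DEFAULT]
-	#   socket   = /var/run/suricata/reporter.socket
-	#   user     = suricata
-	#   group    = suricata
-	#   workers  = 4
-	#   database = /var/log/suricata/reporter.db
-	#
-	#   [syslog]
-	#   enabled = yes
-	#
-	#   [email]
-	#   enabled    = yes
-	#   sender     = no-reply@example.com
-	#   recipients = admin@example.com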
-
- @property
- def socket_path(self):
- return self.config.get("DEFAULT", "socket",
- fallback="/var/run/suricata/reporter.socket")
-
- def get_socket_owner(self):
- # Fetch the user/group from the configuration
- uname = self.config.get("DEFAULT", "user", fallback="suricata")
- gname = self.config.get("DEFAULT", "group", fallback="suricata")
-
- # Fetch the user and group
- try:
- user = pwd.getpwnam(uname)
- except KeyError:
- user = None
-
- try:
- group = grp.getgrnam(gname)
- except KeyError:
- group = None
-
- # Return a tuple with the desired user/group IDs
- return (user.pw_uid if user else -1, group.gr_gid if group else -1)
-
- def _create_socket(self):
- """
- Creates a new socket to receive messages on
- """
- # Create a new, non-blocking UNIX datagram socket
- sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM|socket.SOCK_NONBLOCK)
-
-		# Bind to the configured socket path
- try:
- sock.bind(self.socket_path)
- except OSError as e:
- log.error("Failed to bind to socket: %s" % e)
-
- # Terminate immediately
- raise SystemExit(1)
-
- # Fetch the socket owner
- uid, gid = self.get_socket_owner()
-
- # Adjust the ownership
- os.chown(self.socket_path, uid, gid)
-
- # Call something whenever we receive data on the socket
- self.loop.add_reader(sock.fileno(), self._receive_message, sock)
-
- # Return the socket
- return sock
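-
-	# For reference, Suricata is expected to deliver EVE JSON events to this
-	# socket. A suricata.yaml output section could look roughly like this
-	# (a sketch; the filename must match the "socket" setting above):
-	#
-	#   outputs:
-	#     - eve-log:
-	#         enabled: yes
-	#         filetype: unix_dgram
-	#         filename: /var/run/suricata/reporter.socket
-	#         types:
-	#           - alert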
-
- def launch_workers(self):
- """
- Launches workers...
- """
- cpu_count = multiprocessing.cpu_count()
-
-		# Fetch the number of workers from the configuration
- workers = self.config.getint("DEFAULT", "workers", fallback=cpu_count)
-
-		# Fall back to the CPU count if zero or a negative number was given
- if workers < 1:
- workers = cpu_count
-
-		# Create the requested number of workers
- self.workers = [
- Worker(reporter=self) for _ in range(workers)
- ]
-
- # Start them all immediately
- for worker in self.workers:
- worker.start()
-
- def terminate_workers(self):
- """
- Terminates all running workers.
- """
- log.debug("Terminating workers...")
-
- # Terminate all workers
- for worker in self.workers:
- worker.terminate()
-
- log.debug("Waiting for all workers to terminate...")
-
- # Wait until all workers have terminated
- for worker in self.workers:
- worker.join()
-
- log.debug("All workers have terminated...")
-
- # Reset the workers
- self.workers = []
-
- async def run(self):
- """
- The main loop of the application.
- """
- log.debug("Starting reporter...")
-
- # Launch all workers
- self.launch_workers()
-
- # Wait until we have terminated
- await self.is_terminated.wait()
-
- # Remove the socket so we won't receive any more data
- try:
- os.unlink(self.socket_path)
- except OSError as e:
- log.error("Failed to remove %s: %s" % (self.socket_path, e))
-
- # Close the queue
- self.queue.close()
-
- # Terminate all workers
- self.terminate_workers()
-
- log.debug("Reporter has exited")
-
- def terminate(self):
- """
- Called when the reporter is supposed to terminate.
- """
- log.debug("Terminating...")
-
- # We are no longer running
- self.is_terminated.set()
-
- def reload(self):
- """
- Called on SIGHUP.
- """
- log.info("Reloading...")
-
- # Re-read the configuration
- self.config = self.read_config()
-
- # Terminate all workers
- self.terminate_workers()
-
- # Launch a new set of workers
- self.launch_workers()
-
- def _receive_message(self, sock):
- """
- Called when there is some socket activity.
-
- It will read the entire datagram and push it into the queue.
- """
- # Read the data from the socket
-		data, _addr = sock.recvfrom(65535)
-
- # Push the data straight into the queue
- try:
- self.queue.put(data, block=False)
-
- # Log a message if the queue is full
- except queue.Full as e:
- log.warning("Failed to push event into the queue. The queue seems to be full.")
-
- # Ignore if the queue has been closed
- except ValueError:
- pass
-
-
-class Worker(multiprocessing.Process):
- def __init__(self, reporter):
- super().__init__()
-
- # Store the reporter
- self.reporter = reporter
-
- # Open the database
- self.db = self._open_database()
-
- @property
- def config(self):
- """
- Proxy to access the configuration file
- """
- return self.reporter.config
-
- def _open_database(self):
- """
- Opens the database
- """
- # Fetch the path
- path = self.config.get("DEFAULT", "database",
- fallback="/var/log/suricata/reporter.db")
-
- # Open the database
- db = sqlite3.connect(path)
-
- # Enable the write-ahead-log
- db.execute("PRAGMA journal_mode = WAL")
-
- # Create the schema
- db.executescript("""
- -- Create the main table
- CREATE TABLE IF NOT EXISTS alerts (
- id INTEGER PRIMARY KEY,
-
- -- Store the timestamp
- timestamp INTEGER NOT NULL,
-
- -- Store the entire JSON object
- event JSONB NOT NULL
- );
-
- -- Index alerts by their timestamp
- CREATE INDEX IF NOT EXISTS alerts_timestamp ON alerts(timestamp);
- """)
-
- return db
-
- def run(self):
- """
- This is the main entry point for workers...
- """
- log.debug("Worker %s launched" % self.pid)
-
- # Reset signal handlers
- for signo in (signal.SIGINT, signal.SIGTERM):
- signal.signal(signo, signal.SIG_DFL)
-
-		# Loop forever
- while True:
- try:
- event = self.reporter.queue.get(block=True)
-
- # If the queue has been closed, we immediately exit
- except ValueError:
- break
-
- # Parse the event
- try:
- event = Event(event)
-
- # Skip any events we could not decode
- except ValueError as e:
- log.warning("Failed to decode event: %s" % e)
- continue
-
- # Log the event
- #log.debug("Received event in worker %s: %s" % (self.pid, event))
-
- # Process the event
- try:
- self.process(event)
-
- # Log any exceptions, but keep going
- except Exception as e:
- log.error("Failed to process: %s" % e)
-
- # Optimize the database before exiting
- log.debug("Optimizing the database")
-
- self.db.execute("PRAGMA optimize")
- self.db.execute("PRAGMA wal_checkpoint = TRUNCATE")
-
- log.debug("Worker %s terminated" % self.pid)
-
- def process(self, event):
- """
- Called whenever we have received an event
- """
- # Process by type
- if event.type == "alert":
- return self.process_alert(event)
-
- # We don't care about anything else for now
- return
-
- def process_alert(self, event):
- """
- Called to process alerts
- """
- # Log the event
- log.debug("Received alert: %s" % event)
-
- # Write the event to the database
- self.db.execute("INSERT INTO alerts(timestamp, event) VALUES(?, ?)",
- (event.timestamp.timestamp(), event.json))
- self.db.commit()
-
- # Send to syslog
- if self.config.getboolean("syslog", "enabled", fallback=False):
- self.send_to_syslog(event)
-
- # Send an email
- if self.config.getboolean("email", "enabled", fallback=False):
- self.send_alert_email(event)
-
- def send_to_syslog(self, event):
- """
- Sends the event to the local syslog server in fast.log format
- """
- log.warning(event.fast_log)
-
- def send_alert_email(self, event):
- """
- Generates a new email with the alert
- """
- # Create a new message
- msg = email.message.EmailMessage()
-
- # Fetch the sender
- email_from = self.config.get("email", "sender", fallback=None)
- if email_from is None:
- email_from = "no-reply@%s" % HOSTNAME
-
- # Set the sender
- msg.add_header("From", EMAIL_FROM % email_from)
-
- # Fetch the recipients
- email_recipients = self.config.get("email", "recipients", fallback=None)
- if email_recipients is None:
- log.error("Cannot send alert emails because no recipients have been configured.")
- return
-
- # Split the recipients
- email_recipients = email.utils.getaddresses([email_recipients])
-
- # Add them to the email
- msg.add_header("To", ", ".join(
- email.utils.formataddr(recipient) for recipient in email_recipients)
- )
-
- # Set the Subject
- msg.add_header("Subject", "[ALERT][%s] %s %s - %s" % (HOSTNAME,
- "*" * event.alert_severity, event.alert_signature, event.alert_category))
-
- # Add the timestamp as Date: header
- msg.add_header("Date", email.utils.format_datetime(event.timestamp))
-
- # Generate a Message ID
- msg.add_header("Message-ID", email.utils.make_msgid())
-
- # Compose the content
- content = [
- _("To whom it may concern,"),
- "",
-			_("The IPFire Intrusion Prevention System has raised the following alert:"),
- "",
- " %-20s : %s" % (_("Signature"), event.alert_signature),
- " %-20s : %s" % (_("Category"), event.alert_category),
- " %-20s : %s" % (_("Severity"), event.alert_severity),
- " %-20s : %s" % (_("Timestamp"),
- event.timestamp.strftime("%A, %d %B %Y at %H:%M:%S %Z")),
- " %-20s : %s:%s" % (_("Source"),
- event.source_address, event.source_port or event.icmp_code),
- " %-20s : %s:%s" % (_("Destination"),
- event.destination_address, event.destination_port or event.icmp_type),
- " %-20s : %s" % (_("Protocol"), event.protocol),
- "",
- ]
-
- # Show if something was blocked
- if event.alert_action == "blocked":
- content += (
- _("The threat was blocked."), "",
- )
-
- # Add the content to the email
- msg.set_content("\n".join(content))
-
- # Log the generated email
- log.debug(msg.as_string())
-
- # Send the email
- p = subprocess.Popen(
- ["/usr/sbin/sendmail", "-t", "-oi", "-f", email_from],
- text=True,
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- )
-
- # Pipe the email into sendmail
- stdout, stderr = p.communicate(msg.as_string())
-
-		if p.returncode != 0:
-			log.error("Failed to send email. sendmail returned %s:" % p.returncode)
-			if stdout:
-				log.error(stdout)
-			return
-
-		log.debug("Successfully sent email to %s" % \
-			", ".join(address for name, address in email_recipients))
-
-
-class Event(object):
- def __init__(self, event):
- # Parse the event
- try:
- self.data = json.loads(event)
-
- # Raise some ValueError if we could not decode the input
- except json.JSONDecodeError as e:
- raise ValueError("%s" % e) from e
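-
-	# An abbreviated EVE "alert" event, for illustration (the values are made
-	# up, the keys match the properties of this class):
-	#
-	#   {
-	#     "timestamp": "2025-01-01T12:00:00.000000+0000",
-	#     "event_type": "alert",
-	#     "src_ip": "192.0.2.1", "src_port": 54321,
-	#     "dest_ip": "198.51.100.2", "dest_port": 80,
-	#     "proto": "TCP",
-	#     "alert": {
-	#       "action": "blocked", "gid": 1, "signature_id": 1000001, "rev": 1,
-	#       "signature": "Example signature", "category": "Example category",
-	#       "severity": 2
-	#     }
-	#   }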
-
- def __str__(self):
- return "%s" % self.data
-
- @property
- def json(self):
- """
- Returns all the data serialised as JSON
- """
- return json.dumps(self.data)
-
- @property
- def type(self):
- return self.data.get("event_type")
-
- @property
- def timestamp(self):
- t = self.data.get("timestamp")
-
- # Parse the timestamp
- return datetime.datetime.strptime(t, "%Y-%m-%dT%H:%M:%S.%f%z")
-
- @property
- def source_address(self):
- return self.data.get("src_ip")
-
- @property
- def source_port(self):
- return self.data.get("src_port", None)
-
- @property
- def destination_address(self):
- return self.data.get("dest_ip")
-
- @property
- def destination_port(self):
- return self.data.get("dest_port", None)
-
- @property
- def protocol(self):
- return self.data.get("proto")
-
- @property
- def icmp_code(self):
- return self.data.get("icmp_code", None)
-
- @property
- def icmp_type(self):
- return self.data.get("icmp_type", None)
-
- # Alert Stuff
-
- @property
- def alert(self):
- return self.data.get("alert")
-
- @property
- def alert_category(self):
- return self.alert.get("category")
-
- @property
- def alert_signature(self):
- return self.alert.get("signature")
-
- @property
- def alert_signature_id(self):
- return self.alert.get("signature_id")
-
- @property
- def alert_severity(self):
- return self.alert.get("severity", 0)
-
- @property
- def alert_action(self):
- return self.alert.get("action")
-
- @property
- def alert_gid(self):
- return self.alert.get("gid")
-
- @property
- def alert_rev(self):
- return self.alert.get("rev")
-
- @property
- def fast_log(self):
- """
- Returns the event in a human-readable way (like fast.log)
- """
- s = []
-
- # Show if we dropped the packet
- if self.alert_action == "blocked":
- s.append("[Drop]")
-
- # Add some stars to make it pretty
- s.append("[**]")
-
- # Show which signature created the alert
- s.append("%s:%s:%s" % (self.alert_gid, self.alert_signature_id, self.alert_rev))
-
- # Show the signature
- s.append("%s" % self.alert_signature)
-
- # More stars
- s.append("[**]")
-
- # Classification
- s.append("[Classification: %s]" % self.alert_category)
-
- # Priority
- s.append("[Priority: %s]" % self.alert_severity)
-
- # Protocol
- s.append("{%s}" % self.protocol)
-
- # Source and Destination Addresses
- s.append("%s:%s -> %s:%s" % (
- self.source_address,
- self.source_port or self.icmp_code,
- self.destination_address,
- self.destination_port or self.icmp_type,
- ))
-
- return " ".join(s)
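-
-	# An example of the resulting line (illustrative values):
-	#
-	#   [Drop] [**] 1:1000001:1 Example signature [**] [Classification: Example category] [Priority: 2] {TCP} 192.0.2.1:54321 -> 198.51.100.2:80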
-
-def setup_logging(loglevel=logging.INFO):
- log.setLevel(loglevel)
-
- # Log to syslog by default
- handler = logging.handlers.SysLogHandler(address="/dev/log", facility="local5")
- log.addHandler(handler)
-
- # Format everything
- formatter = logging.Formatter("%(name)s[%(process)d]: %(message)s")
- handler.setFormatter(formatter)
-
- handler.setLevel(loglevel)
-
- # Write everything to the console, too
- handler = logging.StreamHandler()
- log.addHandler(handler)
-
- handler.setLevel(loglevel)
-
- return log
-
-async def main():
- parser = argparse.ArgumentParser(description="Reporter Service for Suricata")
-
- # Command Line Arguments
- parser.add_argument("--verbose", "-v", action="count", help="Be more verbose")
- parser.add_argument("--config", "-c",
- help="Configuration File", default="/etc/suricata/reporter.conf")
-
- # Parse command line arguments
- args = parser.parse_args()
-
- # Setup logging
- loglevel = logging.WARN
-
- if args.verbose:
- if args.verbose == 1:
- loglevel = logging.INFO
- elif args.verbose >= 2:
- loglevel = logging.DEBUG
-
- setup_logging(loglevel=loglevel)
-
-	# Create the reporter
- reporter = Reporter(args.config)
-
- # Run!
- await reporter.run()
-
-if __name__ == "__main__":
- asyncio.run(main())