git.ipfire.org Git - collecty.git/commitdiff
Add a simple backup mechanism
author Michael Tremer <michael.tremer@ipfire.org>
Mon, 21 Sep 2020 14:22:28 +0000 (14:22 +0000)
committer Michael Tremer <michael.tremer@ipfire.org>
Mon, 21 Sep 2020 14:22:28 +0000 (14:22 +0000)
This is now called through DBUS and I am not sure whether
that is a good idea or not.

The upside is that the client will never access files
directly; the downside is that the backup process blocks
the DBUS thread.

Signed-off-by: Michael Tremer <michael.tremer@ipfire.org>
src/collecty/bus.py
src/collecty/client.py
src/collecty/daemon.py
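
As a rough illustration of the blocking call path described in the commit
message, a client could trigger the backup over D-Bus as sketched below. This
is only a sketch: the BUS_DOMAIN import location, the use of the system bus,
the timeout and the backup path are assumptions, not part of this commit.

    # Sketch: triggering a backup over D-Bus. The call only returns once the
    # daemon has written the whole archive, which is the blocking behaviour
    # noted in the commit message.
    import dbus

    from collecty.constants import BUS_DOMAIN  # assumed import location

    bus = dbus.SystemBus()  # assuming the daemon sits on the system bus
    proxy = bus.get_object(BUS_DOMAIN, "/GraphGenerator")

    # Raise the call timeout, since a large backup can easily exceed the
    # default of roughly 25 seconds used by dbus-python.
    proxy.Backup("/var/tmp/collecty-backup.tar.gz", timeout=3600)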

index 8ad28fc96d1a24d8569fa8d3e00270eb947c73cf..ef56e2ca5183079de12d32bf609a01e3e96fe5bd 100644 (file)
@@ -77,6 +77,10 @@ class GraphGenerator(dbus.service.Object):
 
                self.collecty = collecty
 
+       @dbus.service.method(BUS_DOMAIN, in_signature="s")
+       def Backup(self, filename):
+               self.collecty.backup(filename)
+
        @dbus.service.method(BUS_DOMAIN, in_signature="sa{sv}", out_signature="a{sv}")
        def GenerateGraph(self, template_name, kwargs):
                """
index 2daa6d0fe3bfbe1a9a2cf401a99dc77374915ab4..cee5bd8f6b099251206605c6846972c97b75a82a 100644 (file)
@@ -38,6 +38,17 @@ class CollectyClient(object):
 
                self.proxy = self.bus.get_object(BUS_DOMAIN, "/GraphGenerator")
 
+       def backup(self, filename):
+               """
+                       Writes a backup of everything to the given file
+               """
+               self.proxy.Backup(filename)
+
+       def backup_cli(self, ns):
+               print(_("Backing up..."))
+
+               self.backup(ns.filename)
+
        def last_update(self, template_name, **kwargs):
                last_update = self.proxy.LastUpdate(template_name, kwargs)
 
@@ -183,6 +194,13 @@ class CollectyClient(object):
                        help=_("Lists all graph templates"))
                parser_list_templates.set_defaults(func=self.list_templates_cli)
 
+               # backup
+               backup = subparsers.add_parser("backup",
+                       help=_("Backup all RRD data"),
+               )
+               backup.add_argument("filename", nargs="?")
+               backup.set_defaults(func=self.backup_cli)
+
                # version
                parser_version = subparsers.add_parser("version", help=_("Show version"))
                parser_version.set_defaults(func=self.version_cli)
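
For context, the new subcommand could also be driven from Python roughly as
sketched below; the class and module come from the diff above, while the
argument-free constructor and the backup path are assumptions.

    # Sketch: using the new backup command programmatically, assuming
    # CollectyClient() needs no arguments and the collecty daemon is running.
    from collecty.client import CollectyClient

    client = CollectyClient()

    # Equivalent to the "backup" subcommand on the command line; note that
    # "filename" is declared with nargs="?" above, so a missing argument
    # would be passed through to the daemon as None.
    client.backup("/var/tmp/collecty-backup.tar.gz")
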
index e55b3a680e11013dcbafba615a1b8ae55590d425..cb811ffd82531680a3d6f9b73183427daa7f44fc 100644 (file)
@@ -25,6 +25,8 @@ import queue
 import rrdtool
 import sched
 import signal
+import tarfile
+import tempfile
 import time
 
 from . import bus
@@ -206,6 +208,35 @@ class Collecty(object):
 
                return plugin.last_update(*args, **kwargs)
 
+       def backup(self, filename):
+               # Write all data to disk first
+               self.write_queue.commit()
+
+               log.info(_("Backing up to %s...") % filename)
+
+               # Open a compressed tar file and add all RRD dumps to it
+               with tarfile.open(filename, mode="w:gz") as archive:
+                       for path, directories, files in os.walk(DATABASE_DIR):
+                               for file in files:
+                                       # Skip any non-RRD files
+                                       if not file.endswith(".rrd"):
+                                               continue
+
+                                       # Compose the full file path
+                                       file = os.path.join(path, file)
+
+                                       log.debug(_("Adding %s to backup...") % file)
+
+                                       with tempfile.NamedTemporaryFile() as t:
+                                               rrdtool.dump(file, t.name)
+
+                                               # Add the file to the archive
+                                               archive.add(
+                                                       t.name, arcname=file[len(DATABASE_DIR):],
+                                               )
+
+               log.info(_("Backup finished"))
+
 
 class WriteQueue(object):
        def __init__(self, collecty):
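
As a rough counterpart to backup(), the archive could later be restored by
feeding each member back through rrdtool. This is only a sketch and not part
of this commit; it assumes the rrdtool bindings expose restore() analogous to
dump(), and that DATABASE_DIR points at the live database directory.

    # Sketch: restoring a backup archive produced by Collecty.backup().
    # Members keep their .rrd names but contain rrdtool XML dumps.
    import os
    import tarfile
    import tempfile

    import rrdtool

    DATABASE_DIR = "/var/lib/collecty"  # assumed value

    def restore(filename):
        with tarfile.open(filename, mode="r:gz") as archive:
            for member in archive.getmembers():
                if not member.isfile():
                    continue

                # Write the XML dump to a temporary file
                with tempfile.NamedTemporaryFile() as t:
                    t.write(archive.extractfile(member).read())
                    t.flush()

                    # Recreate the RRD file from the dump
                    destination = os.path.join(
                        DATABASE_DIR, member.name.lstrip("/"),
                    )
                    os.makedirs(os.path.dirname(destination), exist_ok=True)
                    rrdtool.restore(t.name, destination)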