#!/usr/bin/python
###############################################################################
#                                                                             #
# collecty - A system statistics collection daemon for IPFire                 #
# Copyright (C) 2012 IPFire development team                                  #
#                                                                             #
# This program is free software: you can redistribute it and/or modify        #
# it under the terms of the GNU General Public License as published by        #
# the Free Software Foundation, either version 3 of the License, or           #
# (at your option) any later version.                                         #
#                                                                             #
# This program is distributed in the hope that it will be useful,             #
# but WITHOUT ANY WARRANTY; without even the implied warranty of              #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the                #
# GNU General Public License for more details.                                #
#                                                                             #
# You should have received a copy of the GNU General Public License           #
# along with this program. If not, see <http://www.gnu.org/licenses/>.        #
#                                                                             #
###############################################################################

import signal
import time

import plugins

from constants import *
from i18n import _

import logging
log = logging.getLogger("collecty")

class Collecty(object):
    # The default interval (in seconds) at which all data is written to disk.
    SUBMIT_INTERVAL = 300

    def __init__(self, debug=False):
        # Enable debug logging when running in debug mode.
        if debug:
            log.setLevel(logging.DEBUG)

        self.data_sources = []

        # Indicates whether this process should be running or not.
        self.running = True
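        # The timer elapses whenever the next submit is due; the heartbeat is
        # presumably the interval at which wait() wakes up, so that a shutdown
        # request is noticed quickly.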
        self.timer = plugins.Timer(self.SUBMIT_INTERVAL, heartbeat=2)

        # Add all automatic data sources.
        self.add_autocreate_data_sources()

        log.info(_("Collecty successfully initialized."))

    def add_autocreate_data_sources(self):
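        """
        Registers all data source instances that plugins create
        automatically via their autocreate() method.
        """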
        for data_source in plugins.data_sources:
            if not hasattr(data_source, "autocreate"):
                continue

            ret = data_source.autocreate(self)
            if not ret:
                continue

            if not isinstance(ret, list):
                ret = [ret]

            log.debug(_("Data source '%(name)s' registered %(number)s instance(s).") % \
                { "name" : data_source.name, "number" : len(ret) })

            self.data_sources += ret

    def run(self):
        # Register signal handlers.
        self.register_signal_handler()

        # Start all data source threads.
        for ds in self.data_sources:
            ds.start()

        # Regularly submit all data to disk.
        while self.running:
            if self.timer.wait():
                self.submit_all()

        # Wait until all instances are finished.
        while self.data_sources:
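            # Iterate over a copy of the list, because elements are
            # removed from it while iterating.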
            for ds in self.data_sources[:]:
                if not ds.isAlive():
                    log.debug(_("%s is not alive anymore. Removing.") % ds)
                    self.data_sources.remove(ds)

            # Wait a bit.
            time.sleep(0.1)

        log.debug(_("No thread running. Exiting main thread."))

    def submit_all(self):
        """
        Submit all data right now.
        """
        log.debug(_("Submitting all data in memory"))
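        # Each data source implements _submit(), which is expected to
        # flush its collected readings to disk.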
        for ds in self.data_sources:
            ds._submit()

        # Schedule the next submit.
        self.timer.reset()

    def shutdown(self):
        log.debug(_("Received shutdown signal"))

        self.running = False
        if self.timer:
            self.timer.cancel()

        # Propagate the shutdown to all threads.
        for ds in self.data_sources:
            ds.shutdown()

    def register_signal_handler(self):
        for s in (signal.SIGTERM, signal.SIGINT, signal.SIGUSR1):
            log.debug(_("Registering signal %d") % s)

            signal.signal(s, self.signal_handler)

    def signal_handler(self, sig, *args, **kwargs):
        log.info(_("Caught signal %d") % sig)

        if sig in (signal.SIGTERM, signal.SIGINT):
            # Shut down this application.
            self.shutdown()

        elif sig == signal.SIGUSR1:
            # Submit all data.
            self.submit_all()

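    # Default arguments for rendering graphs; GRAPH_DEFAULT_ARGUMENTS
    # is defined in the constants module.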
    @property
    def graph_default_arguments(self):
        return GRAPH_DEFAULT_ARGUMENTS