#!/usr/bin/python
###############################################################################
#                                                                             #
# collecty - A system statistics collection daemon for IPFire                 #
# Copyright (C) 2012 IPFire development team                                  #
#                                                                             #
# This program is free software: you can redistribute it and/or modify        #
# it under the terms of the GNU General Public License as published by        #
# the Free Software Foundation, either version 3 of the License, or           #
# (at your option) any later version.                                         #
#                                                                             #
# This program is distributed in the hope that it will be useful,             #
# but WITHOUT ANY WARRANTY; without even the implied warranty of              #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the                #
# GNU General Public License for more details.                                #
#                                                                             #
# You should have received a copy of the GNU General Public License           #
# along with this program. If not, see <http://www.gnu.org/licenses/>.        #
#                                                                             #
###############################################################################

import signal
import time

import plugins

from constants import *
from i18n import _

import logging
log = logging.getLogger("collecty")

class Collecty(object):
    # The default interval (in seconds) at which all data is written to disk.
    SUBMIT_INTERVAL = 300

    def __init__(self, debug=False):
        self.data_sources = []

        # Indicates whether this process should be running or not.
        self.running = True
        self.timer = plugins.Timer(self.SUBMIT_INTERVAL, heartbeat=2)
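        # Assumption from the usage in run(): wait() returns True only once
        # the full SUBMIT_INTERVAL has elapsed, while the two-second heartbeat
        # wakes the loop up early so self.running can be re-checked promptly.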

        # Add all automatic data sources.
        self.add_autocreate_data_sources()

        log.info(_("Collecty successfully initialized."))

    def add_autocreate_data_sources(self):
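        # The autocreate() hook may return a single data source instance, a
        # list of instances, or a false value if nothing is to be created.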
        for data_source in plugins.data_sources:
            if not hasattr(data_source, "autocreate"):
                continue

            ret = data_source.autocreate(self)
            if not ret:
                continue

            if not isinstance(ret, list):
                ret = [ret]

            log.debug(_("Data source '%(name)s' registered %(number)s instance(s).") %
                { "name" : data_source.name, "number" : len(ret) })

            self.data_sources += ret

    def run(self):
        # Register signal handlers.
        self.register_signal_handler()

        # Start all data source threads.
        for ds in self.data_sources:
            ds.start()

        # Regularly submit all data to disk.
        while self.running:
            if self.timer.wait():
                self.submit_all()

        # Wait until all instances are finished.
        while self.data_sources:
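            # Iterate over a copy of the list so that finished data sources
            # can be removed from it while looping.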
            for ds in self.data_sources[:]:
                if not ds.isAlive():
                    log.debug(_("%s is not alive anymore. Removing.") % ds)
                    self.data_sources.remove(ds)

            # Wait a bit.
            time.sleep(0.1)

        log.debug(_("No thread running. Exiting main thread."))

    def submit_all(self):
        """
        Submit all data right now.
        """
        log.debug(_("Submitting all data in memory"))
        for ds in self.data_sources:
            ds._submit()

        # Schedule the next submit.
        self.timer.reset()

    def shutdown(self):
        log.debug(_("Received shutdown signal"))

        self.running = False
        if self.timer:
            self.timer.cancel()
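            # Cancelling the timer is assumed to interrupt a pending wait() in
            # run(), so the main loop notices self.running is False right away.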

        # Propagate the shutdown to all data source threads.
        for ds in self.data_sources:
            ds.shutdown()

    def register_signal_handler(self):
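        # Note: CPython only permits installing signal handlers from the main
        # thread, so this must be called from the main thread (as run() does).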
        for s in (signal.SIGTERM, signal.SIGINT, signal.SIGUSR1):
            log.debug(_("Registering signal %d") % s)

            signal.signal(s, self.signal_handler)

    def signal_handler(self, sig, *args, **kwargs):
        log.info(_("Caught signal %d") % sig)

        if sig in (signal.SIGTERM, signal.SIGINT):
            # Shut down this application.
            self.shutdown()

        elif sig == signal.SIGUSR1:
            # Submit all data.
            self.submit_all()

    @property
    def graph_default_arguments(self):
        return GRAPH_DEFAULT_ARGUMENTS
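
# A minimal usage sketch (an illustration only; upstream starts the daemon
# from a separate launcher script that is not part of this file):
#
#   c = Collecty()
#   c.run()
#
# While running, sending SIGUSR1 forces an immediate submit_all(), and
# SIGTERM or SIGINT trigger a clean shutdown().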