#!/usr/bin/python
###############################################################################
#                                                                             #
# collecty - A system statistics collection daemon for IPFire                 #
# Copyright (C) 2012 IPFire development team                                  #
#                                                                             #
# This program is free software: you can redistribute it and/or modify        #
# it under the terms of the GNU General Public License as published by        #
# the Free Software Foundation, either version 3 of the License, or           #
# (at your option) any later version.                                         #
#                                                                             #
# This program is distributed in the hope that it will be useful,             #
# but WITHOUT ANY WARRANTY; without even the implied warranty of              #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the                #
# GNU General Public License for more details.                                #
#                                                                             #
# You should have received a copy of the GNU General Public License           #
# along with this program. If not, see <http://www.gnu.org/licenses/>.        #
#                                                                             #
###############################################################################
24 import ConfigParser
as configparser
28 from constants
import *
32 log
= logging
.getLogger("collecty")
34 class Collecty(object):
35 # The default interval, when all data is written to disk.
38 def __init__(self
, debug
=False):
39 self
.config
= configparser
.ConfigParser()
40 self
.data_sources
= []
42 # Indicates whether this process should be running or not.
44 self
.timer
= plugins
.Timer(self
.SUBMIT_INTERVAL
, heartbeat
=2)
46 # Add all automatic data sources.
47 self
.add_autocreate_data_sources()
49 log
.info(_("Collecty successfully initialized."))
51 def add_autocreate_data_sources(self
):
52 for data_source
in plugins
.data_sources
:
53 if not hasattr(data_source
, "autocreate"):
56 ret
= data_source
.autocreate(self
)
60 if not type(ret
) == type([]):
63 log
.debug(_("Data source '%(name)s' registered %(number)s instance(s).") % \
64 { "name" : data_source
.name
, "number" : len(ret
) })
66 self
.data_sources
+= ret
68 def read_config(self
, config
):
69 self
.config
.read(config
)
71 for section
in self
.config
.sections():
73 data_source
= self
.config
.get(section
, "data_source")
74 data_source
= plugins
.find(data_source
)
75 except configparser
.NoOptionError
:
76 raise ConfigError
, "Syntax error in configuration: plugin option is missing."
78 raise Exception, "Plugin configuration error: Maybe plugin wasn't found? %s" % data_source
81 for (key
, value
) in self
.config
.items(section
):
86 kwargs
["file"] = section
88 ds
= data_source(self
, **kwargs
)
89 self
.data_sources
.append(ds
)
92 # Register signal handlers.
93 self
.register_signal_handler()
95 # Start all data source threads.
96 for ds
in self
.data_sources
:
99 # Regularly submit all data to disk.
101 if self
.timer
.wait():
104 # Wait until all instances are finished.
105 while self
.data_sources
:
106 for ds
in self
.data_sources
[:]:
108 log
.debug(_("%s is not alive anymore. Removing.") % ds
)
109 self
.data_sources
.remove(ds
)
114 log
.debug(_("No thread running. Exiting main thread."))
116 def submit_all(self
):
118 Submit all data right now.
120 log
.debug(_("Submitting all data in memory"))
121 for ds
in self
.data_sources
:
124 # Schedule the next submit.
128 log
.debug(_("Received shutdown signal"))
134 # Propagating shutdown to all threads.
135 for ds
in self
.data_sources
:
138 def register_signal_handler(self
):
139 for s
in (signal
.SIGTERM
, signal
.SIGINT
, signal
.SIGUSR1
):
140 log
.debug(_("Registering signal %d") % s
)
142 signal
.signal(s
, self
.signal_handler
)
144 def signal_handler(self
, sig
, *args
, **kwargs
):
145 log
.info(_("Caught signal %d") % sig
)
147 if sig
in (signal
.SIGTERM
, signal
.SIGINT
):
148 # Shutdown this application.
151 elif sig
== signal
.SIGUSR1
:
156 def graph_default_arguments(self
):
157 return GRAPH_DEFAULT_ARGUMENTS