client = collecty.CollectyClient()
-for type in ("cpu", "entropy", "memory", "loadavg"):
- for interval in ("day", "week", "year"):
- client.graph(type, "%s-%s.png" % (type, interval), interval)
+for ds in client.data_sources:
+ for template in ds.templates:
+ t = template(ds)
+
+ for interval in ("-3h", "day", "week", "year"):
+ t.graph("%s-%s.png" % (ds.id, interval), interval)
self.collecty = daemon.Collecty(**settings)
@property
- def instances(self):
- return self.collecty.instances
+ def data_sources(self):
+ return self.collecty.data_sources
- def get_instance_by_id(self, id):
- for instance in self.instances:
- if not instance.id == id:
+ def get_data_source_by_id(self, id):
+ for ds in self.data_sources:
+ if not ds.id == id:
continue
- return instance
+ return ds
def graph(self, id, filename, interval=None, **kwargs):
- instance = self.get_instance_by_id(id)
- assert instance, "Could not find instance: %s" % id
+ ds = self.get_data_source_by_id(id)
+ assert ds, "Could not find data source: %s" % id
- instance.graph(filename, interval=interval, **kwargs)
+ ds.graph(filename, interval=interval, **kwargs)
def __init__(self, debug=False):
self.config = configparser.ConfigParser()
- self.instances = []
+ self.data_sources = []
# Indicates whether this process should be running or not.
self.running = True
self.timer = plugins.Timer(self.SUBMIT_INTERVAL, heartbeat=2)
- # Add all automatic plugins.
- self.add_autocreate_plugins()
+ # Add all automatic data sources.
+ self.add_autocreate_data_sources()
log.info(_("Collecty successfully initialized."))
- def add_autocreate_plugins(self):
- for plugin in plugins.registered_plugins:
- if not hasattr(plugin, "autocreate"):
+ def add_autocreate_data_sources(self):
+ for data_source in plugins.data_sources:
+ if not hasattr(data_source, "autocreate"):
continue
- ret = plugin.autocreate(self)
+ ret = data_source.autocreate(self)
if not ret:
continue
if not type(ret) == type([]):
ret = [ret,]
- log.debug(_("Plugin '%(name)s' registered %(number)s instance(s).") % \
- { "name" : plugin.name, "number" : len(ret) })
+ log.debug(_("Data source '%(name)s' registered %(number)s instance(s).") % \
+ { "name" : data_source.name, "number" : len(ret) })
- self.instances += ret
+ self.data_sources += ret
def read_config(self, config):
self.config.read(config)
for section in self.config.sections():
try:
- plugin = self.config.get(section, "plugin")
- plugin = plugins.find(plugin)
+ data_source = self.config.get(section, "data_source")
+ data_source = plugins.find(data_source)
except configparser.NoOptionError:
- raise ConfigError, "Syntax error in configuration: plugin option is missing."
+ raise ConfigError, "Syntax error in configuration: data_source option is missing."
except:
- raise Exception, "Plugin configuration error: Maybe plugin wasn't found? %s" % plugin
+ raise Exception, "Data source configuration error: Maybe data source wasn't found? %s" % data_source
kwargs = {}
for (key, value) in self.config.items(section):
kwargs[key] = value
kwargs["file"] = section
- i = plugin(self, **kwargs)
- self.instances.append(i)
+ ds = data_source(self, **kwargs)
+ self.data_sources.append(ds)
def run(self):
# Register signal handlers.
self.register_signal_handler()
- # Start all plugin instances.
- for i in self.instances:
- i.start()
+ # Start all data source threads.
+ for ds in self.data_sources:
+ ds.start()
# Regularly submit all data to disk.
while self.running:
self.submit_all()
# Wait until all instances are finished.
- while self.instances:
- for instance in self.instances[:]:
- if not instance.isAlive():
- log.debug(_("%s is not alive anymore. Removing.") % instance)
- self.instances.remove(instance)
+ while self.data_sources:
+ for ds in self.data_sources[:]:
+ if not ds.isAlive():
+ log.debug(_("%s is not alive anymore. Removing.") % ds)
+ self.data_sources.remove(ds)
# Wait a bit.
time.sleep(0.1)
Submit all data right now.
"""
log.debug(_("Submitting all data in memory"))
- for i in self.instances:
- i._submit()
+ for ds in self.data_sources:
+ ds._submit()
# Schedule the next submit.
self.timer.reset()
self.timer.cancel()
# Propagating shutdown to all threads.
- for i in self.instances:
- i.shutdown()
+ for ds in self.data_sources:
+ ds.shutdown()
def register_signal_handler(self):
for s in (signal.SIGTERM, signal.SIGINT, signal.SIGUSR1):
import loadavg
import memory
-registered_plugins = [
- cpu.PluginCPU,
- entropy.PluginEntropy,
- interface.PluginInterface,
- loadavg.PluginLoadAvg,
- memory.PluginMemory,
+data_sources = [
+ cpu.DataSourceCPU,
+ entropy.DataSourceEntropy,
+ interface.DataSourceInterface,
+ loadavg.DataSourceLoadAvg,
+ memory.DataSourceMemory,
]
+
+# Generate graph templates list.
+graph_templates = []
+for ds in data_sources:
+ graph_templates += ds.templates
return self.elapsed > self.timeout
-class Plugin(threading.Thread):
+class DataSource(threading.Thread):
# The name of this plugin.
name = None
# A description for this plugin.
description = None
+ # Templates which can be used to generate a graph out of
+ # the data from this data source.
+ templates = []
+
# The schema of the RRD database.
rrd_schema = None
rra_timespans = [3600, 86400, 604800, 2678400, 31622400]
rra_rows = 2880
- # Instructions how to create the graph.
- rrd_graph = None
-
- # Extra arguments passed to rrdgraph.
- rrd_graph_args = []
-
# The default interval of this plugin.
default_interval = 60
self.log.info(_("Successfully initialized (%s).") % self.id)
def __repr__(self):
- return "<Plugin %s>" % self.name
-
- def __str__(self):
- return "Plugin %s %s" % (self.name, self.file)
+ return "<%s %s>" % (self.__class__.__name__, self.id)
@property
def id(self):
"""
return int(time.time())
+
+class GraphTemplate(object):
+ # A unique name to identify this graph template.
+ name = None
+
+ # Instructions how to create the graph.
+ rrd_graph = None
+
+ # Extra arguments passed to rrdgraph.
+ rrd_graph_args = []
+
+ def __init__(self, ds):
+ self.ds = ds
+
+ @property
+ def collecty(self):
+ return self.ds.collecty
+
def graph(self, file, interval=None,
width=GRAPH_DEFAULT_WIDTH, height=GRAPH_DEFAULT_HEIGHT):
-
args = [
"--width", "%d" % width,
"--height", "%d" % height,
}
args.append("--start")
- if intervals.has_key(interval):
+ try:
args.append(intervals[interval])
- else:
+ except KeyError:
args.append(interval)
- info = { "file" : self.file }
+ info = { "file" : self.ds.file }
for item in self.rrd_graph:
try:
args.append(item % info)
from ..i18n import _
-class PluginCPU(base.Plugin):
+class GraphTemplateCPU(base.GraphTemplate):
name = "cpu"
- description = "CPU Usage Plugin"
-
- rrd_schema = [
- "DS:user:GAUGE:120:0:U",
- "DS:nice:GAUGE:120:0:U",
- "DS:sys:GAUGE:120:0:U",
- "DS:idle:GAUGE:120:0:U",
- "DS:wait:GAUGE:120:0:U",
- "DS:irq:GAUGE:120:0:U",
- "DS:sirq:GAUGE:120:0:U",
- "RRA:AVERAGE:0.5:1:2160",
- "RRA:AVERAGE:0.5:5:2016",
- "RRA:AVERAGE:0.5:15:2880",
- "RRA:AVERAGE:0.5:60:8760",
- ]
rrd_graph = [
"DEF:user=%(file)s:user:AVERAGE",
#"--lower-limit", "0", "--upper-limit", "100",
]
+
+class DataSourceCPU(base.DataSource):
+ name = "cpu"
+ description = "CPU Usage Data Source"
+
+ templates = [GraphTemplateCPU,]
+
+ rrd_schema = [
+ "DS:user:GAUGE:120:0:U",
+ "DS:nice:GAUGE:120:0:U",
+ "DS:sys:GAUGE:120:0:U",
+ "DS:idle:GAUGE:120:0:U",
+ "DS:wait:GAUGE:120:0:U",
+ "DS:irq:GAUGE:120:0:U",
+ "DS:sirq:GAUGE:120:0:U",
+ "RRA:AVERAGE:0.5:1:2160",
+ "RRA:AVERAGE:0.5:5:2016",
+ "RRA:AVERAGE:0.5:15:2880",
+ "RRA:AVERAGE:0.5:60:8760",
+ ]
+
@classmethod
def autocreate(cls, collecty, **kwargs):
# Every system has got at least one CPU.
ENTROPY_FILE = "/proc/sys/kernel/random/entropy_avail"
-class PluginEntropy(base.Plugin):
+class GraphTemplateEntropy(base.GraphTemplate):
name = "entropy"
- description = "Entropy Plugin"
-
- rrd_schema = [
- "DS:entropy:GAUGE:120:0:U",
- "RRA:AVERAGE:0.5:1:2160",
- "RRA:AVERAGE:0.5:5:2016",
- "RRA:AVERAGE:0.5:15:2880",
- "RRA:AVERAGE:0.5:60:8760",
- ]
rrd_graph = [
"DEF:entropy=%(file)s:entropy:AVERAGE",
"LINE3:entropytrend#000000",
]
+
rrd_graph_args = [
"--title", _("Available entropy"),
"--vertical-label", _("Bits"),
"--lower-limit", "0", "--rigid",
]
+
+class DataSourceEntropy(base.DataSource):
+ name = "entropy"
+ description = "Entropy Data Source"
+
+ templates = [GraphTemplateEntropy,]
+
+ rrd_schema = [
+ "DS:entropy:GAUGE:120:0:U",
+ "RRA:AVERAGE:0.5:1:2160",
+ "RRA:AVERAGE:0.5:5:2016",
+ "RRA:AVERAGE:0.5:15:2880",
+ "RRA:AVERAGE:0.5:60:8760",
+ ]
+
@classmethod
def autocreate(cls, collecty, **kwargs):
if not os.path.exists(ENTROPY_FILE):
SYS_CLASS_NET = "/sys/class/net"
-class PluginInterface(base.Plugin):
+class DataSourceInterface(base.DataSource):
name = "interface"
- description = "Interface Statistics"
+ description = "Interface Statistics Data Source"
+
+ templates = []
rrd_schema = [
"DS:bytes_rx:DERIVE:0:U",
from ..i18n import _
-class PluginLoadAvg(base.Plugin):
+class GraphTemplateLoadAvg(base.GraphTemplate):
name = "loadavg"
- description = "Load Average Plugin"
-
- rrd_schema = [
- "DS:load1:GAUGE:120:0:U",
- "DS:load5:GAUGE:120:0:U",
- "DS:load15:GAUGE:120:0:U",
- "RRA:AVERAGE:0.5:1:2160",
- "RRA:AVERAGE:0.5:5:2016",
- "RRA:AVERAGE:0.5:15:2880",
- "RRA:AVERAGE:0.5:60:8760",
- ]
rrd_graph = [
"DEF:load1=%(file)s:load1:AVERAGE",
"LINE:load5#dd8800",
"LINE:load1#dd0000",
]
+
rrd_graph_args = [
"--title", _("Load average"),
"--vertical-label", _("Load"),
"--lower-limit", "0", "--rigid",
]
+
+class DataSourceLoadAvg(base.DataSource):
+ name = "loadavg"
+ description = "Load Average Data Source"
+
+ templates = [GraphTemplateLoadAvg,]
+
+ rrd_schema = [
+ "DS:load1:GAUGE:120:0:U",
+ "DS:load5:GAUGE:120:0:U",
+ "DS:load15:GAUGE:120:0:U",
+ "RRA:AVERAGE:0.5:1:2160",
+ "RRA:AVERAGE:0.5:5:2016",
+ "RRA:AVERAGE:0.5:15:2880",
+ "RRA:AVERAGE:0.5:60:8760",
+ ]
+
@classmethod
def autocreate(cls, collecty, **kwargs):
return cls(collecty, **kwargs)
from ..i18n import _
-class PluginMemory(base.Plugin):
+class GraphTemplateMemory(base.GraphTemplate):
name = "memory"
- description = "Memory Usage Plugin"
-
- rrd_schema = [
- "DS:used:GAUGE:120:0:100",
- "DS:cached:GAUGE:120:0:100",
- "DS:buffered:GAUGE:120:0:100",
- "DS:free:GAUGE:120:0:100",
- "DS:swap:GAUGE:120:0:100",
- "RRA:AVERAGE:0.5:1:2160",
- "RRA:AVERAGE:0.5:5:2016",
- "RRA:AVERAGE:0.5:15:2880",
- "RRA:AVERAGE:0.5:60:8760",
- ]
rrd_graph = [
"DEF:used=%(file)s:used:AVERAGE",
"GPRINT:swapmin:%12s\:" % _("Minimum") + " %6.2lf",
"GPRINT:swapavg:%12s\:" % _("Average") + " %6.2lf\\n",
]
+
rrd_graph_args = [
"--title", _("Memory Usage"),
"--vertical-label", _("Percent"),
"--rigid",
]
+
+class DataSourceMemory(base.DataSource):
+ name = "memory"
+ description = "Memory Usage Data Source"
+
+ templates = [GraphTemplateMemory,]
+
+ rrd_schema = [
+ "DS:used:GAUGE:120:0:100",
+ "DS:cached:GAUGE:120:0:100",
+ "DS:buffered:GAUGE:120:0:100",
+ "DS:free:GAUGE:120:0:100",
+ "DS:swap:GAUGE:120:0:100",
+ "RRA:AVERAGE:0.5:1:2160",
+ "RRA:AVERAGE:0.5:5:2016",
+ "RRA:AVERAGE:0.5:15:2880",
+ "RRA:AVERAGE:0.5:60:8760",
+ ]
+
@classmethod
def autocreate(cls, collecty, **kwargs):
# Every system has got memory.