# Copyright (C) 2011, 2012, 2014 Canonical
# Author: Liam Young, Jacek Nykis
import argparse
import sys

from collections import defaultdict
from fnmatch import fnmatchcase
from itertools import chain
def gen_data_lines(filename):
    """Yield the data lines of *filename*, skipping comment lines.

    A line is a comment when it starts with ``#``.  The file is read
    lazily so arbitrarily large stats files can be streamed.

    :param filename: path to a queue-stats file.
    :yields: each non-comment line, newline included.
    """
    # Text mode so lines compare against the str "#" marker below.
    with open(filename, "r") as fin:
        for line in fin:
            if not line.startswith("#"):
                yield line
def gen_stats(data_lines):
    """Parse raw stats lines into ``(vhost, queue, message_count)`` tuples.

    Each line must contain at least six whitespace-separated fields:
    vhost, queue, two ignored fields, the total message count, and the
    rest of the line.

    :param data_lines: iterable of stats lines (comments already removed).
    :yields: ``(vhost, queue, int(message_count))`` per line.
    :raises ValueError: when the message-count field is not numeric.

    Exits the process with status 2 (Nagios CRITICAL) when a line does
    not have enough fields to parse.
    """
    for line in data_lines:
        try:
            vhost, queue, _, _, m_all, _ = line.split(None, 5)
        except ValueError:
            # Too few fields: the stats file is malformed.  Exit 2 is
            # the Nagios CRITICAL status.
            print("ERROR: problem parsing the stats file")
            sys.exit(2)
        # Raise (not assert) so validation survives `python -O`.
        if not m_all.isdigit():
            raise ValueError("Message count is not a number: %r" % m_all)
        yield vhost, queue, int(m_all)
def collate_stats(stats, limits):
    """Collate stats according to the definitions in the limits file.

    Each stat is matched case-sensitively (``fnmatchcase``) against the
    ``(vhost, queue)`` glob patterns from *limits*; the message counts
    of all stats matching a pattern are summed under that pattern's
    key.  A stat matching no pattern is stored under its own
    ``(vhost, queue)`` key, uncollated.

    :param stats: iterable of ``(vhost, queue, message_count)`` tuples.
    :param limits: iterable of ``(vhost_pat, queue_pat, warn, crit)``.
    :returns: dict mapping ``(vhost, queue)`` to total message count.
    """
    collated = defaultdict(int)
    for vhost, queue, m_all in stats:
        for l_vhost, l_queue, _, _ in limits:
            if fnmatchcase(vhost, l_vhost) and fnmatchcase(queue, l_queue):
                # First matching definition wins; stop scanning limits.
                collated[l_vhost, l_queue] += m_all
                break
        else:
            # No definition matched: store the stat without collating.
            collated[vhost, queue] += m_all
    return collated
def check_stats(stats_collated, limits):
    """Compare collated stats against their limit definitions.

    :param stats_collated: dict of ``(vhost, queue)`` -> message count.
    :param limits: iterable of ``(vhost, queue, warn, crit)``; the
        thresholds may be strings and are converted to int.
    :yields: ``(queue, vhost, message_count, status)`` tuples with
        status one of ``"CRIT"``, ``"WARN"`` or ``"UNKNOWN"``.  When no
        stats were collected at all, a single synthetic CRIT entry is
        yielded.  Stats below their warning threshold yield nothing.
    """
    # Create a limits lookup dict with keys of the form (vhost, queue).
    limits_lookup = dict(
        ((l_vhost, l_queue), (int(t_warning), int(t_critical)))
        for l_vhost, l_queue, t_warning, t_critical in limits)
    if not stats_collated:
        # Finding no queues at all is itself a critical condition.
        yield 'No Queues Found', 'No Vhosts Found', None, "CRIT"
    # Go through the stats and compare against limits, if any.
    for l_vhost, l_queue in sorted(stats_collated):
        m_all = stats_collated[l_vhost, l_queue]
        try:
            t_warning, t_critical = limits_lookup[l_vhost, l_queue]
        except KeyError:
            # A stat with no matching limit definition.
            yield l_queue, l_vhost, m_all, "UNKNOWN"
        else:
            if m_all >= t_critical:
                yield l_queue, l_vhost, m_all, "CRIT"
            elif m_all >= t_warning:
                yield l_queue, l_vhost, m_all, "WARN"
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description='RabbitMQ queue size nagios check.')
    parser.add_argument(
        '-c', nargs=4, action='append', required=True,
        metavar=('vhost', 'queue', 'warn', 'crit'),
        help=('Vhost and queue to check. Can be used multiple times'))
    parser.add_argument(
        'stats_file', nargs='*', type=str,
        help='file containing queue stats')
    args = parser.parse_args()
    # Start generating stats from all files given on the command line.
    stats = gen_stats(
        chain.from_iterable(
            gen_data_lines(filename) for filename in args.stats_file))
    # Collate stats according to limit definitions and check.
    stats_collated = collate_stats(stats, args.c)
    stats_checked = check_stats(stats_collated, args.c)
    criticals, warnings = [], []
    for queue, vhost, message_no, status in stats_checked:
        if status == "CRIT":
            criticals.append(
                "%s in %s has %s messages" % (queue, vhost, message_no))
        elif status == "WARN":
            warnings.append(
                "%s in %s has %s messages" % (queue, vhost, message_no))
    # Nagios exit codes: 0 = OK, 1 = WARNING, 2 = CRITICAL.
    if criticals:
        print("CRITICALS: %s" % ", ".join(criticals))
        sys.exit(2)
    # XXX: No warnings if there are criticals?
    elif warnings:
        print("WARNINGS: %s" % ", ".join(warnings))
        sys.exit(1)
    else:
        print("OK")
        sys.exit(0)