60_rabbitmq.py
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import time
import socket
import requests
import json

# Base fields shared by every open-falcon push entry; Step is the
# reporting interval in seconds.
Entry = {
    "Endpoint": socket.gethostname(),
    "Timestamp": int(time.time()),
    "Step": 60
}
def new_entry(counter_type, metric, tag, value):
    entry = Entry.copy()
    entry.update({
        "CounterType": counter_type,
        "Metric": metric,
        "TAGS": "type=rabbit,{0}".format(tag),
        "Value": value
    })
    return entry
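# Illustrative only: new_entry("GAUGE", "rabbit.queue.state", "queue_name=foo", 1)
# (a hypothetical queue name) yields roughly the following, with the hostname
# and timestamp varying per run:
# {"Endpoint": "myhost", "Timestamp": 1500000000, "Step": 60,
#  "CounterType": "GAUGE", "Metric": "rabbit.queue.state",
#  "TAGS": "type=rabbit,queue_name=foo", "Value": 1}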
class Manager(object):
    def __init__(self, username, password, host="127.0.0.1", port=15672):
        self.username = username
        self.password = password
        self.host = host
        self.port = port
        self.entries = []

    @property
    def api(self):
        return "http://{0}:{1}/api/".format(self.host, self.port)

    def get_api_data(self, name):
        url = self.api + name
        response = requests.get(url, auth=(self.username, self.password))
        return response.json()
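    # Example (hypothetical credentials): Manager("guest", "guest").get_api_data("queues")
    # issues GET http://127.0.0.1:15672/api/queues against the RabbitMQ
    # management plugin and returns the decoded JSON body.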
    def dump_queues(self, excludes):
        for q in self.get_api_data("queues"):
            if q['name'] in excludes:
                continue
            self.dump_queue(q)

    def dump_queue(self, data):
        tag = "queue_name={0}".format(data['name'])
        # Report queue state as 1 when 'running', otherwise 0.
        self.entries.append(new_entry(
            "GAUGE",
            "rabbit.queue.state",
            tag,
            1 if data['state'] == 'running' else 0
        ))
        if "message_stats" not in data:
            return
        #################
        # message_stats #
        #################
        message_stats = data['message_stats']
        for key in ["publish", "confirm", "get", "deliver", "deliver_get"]:
            # publish: count of messages published.
            # confirm: count of messages confirmed.
            # get: count of messages delivered in acknowledgement mode in response to basic.get.
            # deliver: count of messages delivered in acknowledgement mode to consumers.
            # deliver_get: sum of deliver, deliver_noack, get and get_noack.
            if key in message_stats:
                self.entries.append(new_entry(
                    "GAUGE",
                    "rabbit.queue.message_stats.{0}".format(key),
                    tag,
                    message_stats[key]
                ))
    def dump_nodes(self):
        for node in self.get_api_data("nodes"):
            self.dump_node(node)

    def dump_node(self, data):
        tag = "node={0}".format(data["name"])
        # The management API reports "partitions" as a list of partitioned
        # peers; report 1 when it is non-empty, 0 when it is empty or missing.
        partitions = 1 if data.get("partitions") else 0
        self.entries.append(new_entry(
            "GAUGE",
            "rabbit.node.partitions",
            tag,
            partitions
        ))
        running = 1 if data["running"] else 0
        self.entries.append(new_entry(
            "GAUGE",
            "rabbit.node.running",
            tag,
            running
        ))
        # Point-in-time values are reported as GAUGE metrics.
        for key in ["fd_used", "sockets_used", "proc_used",
                    "io_read_avg_time", "io_write_avg_time",
                    "io_sync_avg_time", "io_seek_avg_time",
                    "io_file_handle_open_attempt_avg_time"]:
            self.entries.append(new_entry(
                "GAUGE",
                "rabbit.node.{0}".format(key),
                tag,
                data[key]
            ))
        # Monotonically increasing totals are reported as COUNTER metrics,
        # so open-falcon can derive per-step rates from them.
        for key in ["io_read_count", "io_read_bytes",
                    "io_write_count", "io_write_bytes",
                    "io_sync_count", "io_seek_count", "io_reopen_count",
                    "mnesia_ram_tx_count", "mnesia_disk_tx_count",
                    "msg_store_read_count", "msg_store_write_count",
                    "queue_index_journal_write_count",
                    "queue_index_read_count", "queue_index_write_count",
                    "gc_num", "gc_bytes_reclaimed",
                    "context_switches", "io_file_handle_open_attempt_count",
                    "sockets_total"]:
            self.entries.append(new_entry(
                "COUNTER",
                "rabbit.node.{0}".format(key),
                tag,
                data[key]
            ))
    def dumps_all(self):
        return json.dumps(self.entries)
if __name__ == '__main__':
    manager = Manager("user", "password")
    manager.dump_queues(["nxin.monitor.queue"])
    manager.dump_nodes()
    print(manager.dumps_all())
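# A minimal sketch of actually pushing the collected entries to open-falcon,
# assuming a local falcon-agent exposing its HTTP push interface on port 1988;
# the URL and port here are assumptions, not part of the original script:
#
#     requests.post("http://127.0.0.1:1988/v1/push", data=manager.dumps_all())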