|
| 1 | +#!/usr/bin/env python |
| 2 | + |
| 3 | +import datetime |
| 4 | +import http.server |
| 5 | +import json |
| 6 | +import logging |
| 7 | +import os |
| 8 | +import threading |
| 9 | +import time |
| 10 | +import urllib.parse |
| 11 | + |
| 12 | +import pika |
| 13 | +import requests |
| 14 | + |
# AMQP connection string; %2F is the URL-encoded default vhost "/".
rabbitmq_uri = os.getenv('RABBITMQ_URI', 'amqp://guest:guest@localhost/%2F')
# Port of the RabbitMQ management HTTP API; empty string skips management-URL setup.
rabbitmq_mgmt_port = os.getenv('RABBITMQ_MGMT_PORT', '15672')
# Management API base URL; filled in by extractors_monitor() from the AMQP URI.
rabbitmq_mgmt_url = ''

# Registry of known extractors: name -> version -> info dict.
# Written by the heartbeat consumer, read by the web server and the count updater.
extractors = {}

# Seconds between queue-count refreshes in update_counts().
update_frequency = 60

# Bind address ("" = all interfaces) and port for the status web server.
hostName = ""
hostPort = 9999
| 25 | + |
| 26 | + |
| 27 | +# ---------------------------------------------------------------------- |
| 28 | +# WEB SERVER |
| 29 | +# ---------------------------------------------------------------------- |
class MyServer(http.server.BaseHTTPRequestHandler):
    """Tiny status handler: every GET returns the extractor registry as JSON."""

    def do_GET(self):
        # Serialize a snapshot of the module-level registry, then reply 200.
        payload = json.dumps(extractors).encode('utf-8')
        self.send_response(200)
        self.send_header('Content-type', 'application/json')
        self.end_headers()
        self.wfile.write(payload)
| 36 | + |
| 37 | + |
def http_server():
    """Run the blocking status HTTP server until interrupted.

    The server is used as a context manager, so the listening socket is
    closed (server_close) on any exit path, matching the original
    try/finally behavior.
    """
    with http.server.HTTPServer((hostName, hostPort), MyServer) as httpd:
        httpd.serve_forever()
| 44 | + |
| 45 | + |
| 46 | +# ---------------------------------------------------------------------- |
| 47 | +# MESSAGES IN QUEUES |
| 48 | +# ---------------------------------------------------------------------- |
def get_mgmt_queue_messages(queue):
    """Return the number of messages currently sitting in `queue`.

    Queries the RabbitMQ management HTTP API (rabbitmq_mgmt_url must have
    been populated by extractors_monitor() first).  Returns 0 on any error
    so the periodic refresh loop keeps running.
    """
    global rabbitmq_username, rabbitmq_password
    try:
        response = requests.get(rabbitmq_mgmt_url + queue,
                                auth=(rabbitmq_username, rabbitmq_password),
                                timeout=5)
        response.raise_for_status()
        return response.json()['messages']
    except Exception:
        # Best-effort: log and fall back to 0 rather than killing the poll
        # loop.  `except Exception` (not bare `except:`) lets
        # KeyboardInterrupt/SystemExit propagate.
        logging.exception("Error getting list of messages in %s", queue)
        return 0
| 58 | + |
| 59 | + |
def update_counts():
    """Background loop: refresh per-extractor queue message counts forever.

    Every `update_frequency` seconds, asks the RabbitMQ management API for
    the legacy topic queue, the new direct queue ('extractors.<queue>') and
    the error queue ('error.<queue>') of every known extractor, and stores
    the counts under extractor['messages'].
    """
    global extractors, update_frequency

    while True:
        # Snapshot the registry before iterating: the heartbeat consumer
        # thread may insert new names/versions concurrently, and iterating
        # a dict whose size changes raises RuntimeError.
        for versions in list(extractors.values()):
            for extractor in list(versions.values()):
                # use management api to get counts
                old_waiting = get_mgmt_queue_messages(extractor['queue'])
                new_waiting = get_mgmt_queue_messages('extractors.' + extractor['queue'])
                errors = get_mgmt_queue_messages('error.' + extractor['queue'])

                extractor['messages'] = {
                    'queues': {
                        'total': old_waiting + new_waiting,
                        'direct': new_waiting,
                        'topic': old_waiting
                    },
                    'error': errors
                }

        time.sleep(update_frequency)
| 81 | + |
| 82 | + |
| 83 | +# ---------------------------------------------------------------------- |
| 84 | +# EXTRACTOR HEARTBEATS |
| 85 | +# ---------------------------------------------------------------------- |
def callback(ch, method, properties, body):
    """Handle one extractor heartbeat message.

    Parses the JSON heartbeat, validates the required fields and records
    the extractor (name/version/queue) plus the individual instance id in
    the module-level `extractors` registry.

    Parameters follow the pika consumer-callback signature; only `body`
    (the raw message bytes) is used.
    """
    global extractors

    data = json.loads(body.decode('utf-8'))
    data['updated'] = datetime.datetime.now().isoformat()
    # Reject the heartbeat if ANY required field is absent.  The original
    # `and` chain only rejected when all three were missing, letting a
    # partial message crash below with KeyError.
    if 'id' not in data or 'extractor_info' not in data or 'queue' not in data:
        logging.error("missing fields in json : %r " % body)
        return

    extractor_info = data['extractor_info']

    if extractor_info['name'] not in extractors:
        extractors[extractor_info['name']] = {}

    if extractor_info['version'] not in extractors[extractor_info['name']]:
        extractors[extractor_info['name']][extractor_info['version']] = {
            'extractor_info': extractor_info,
            'queue': data['queue'],
            'extractors': {}
        }
    extractor = extractors[extractor_info['name']][extractor_info['version']]

    # Track when this particular extractor instance was last heard from.
    extractor['extractors'][data['id']] = {
        'last_seen': datetime.datetime.now().isoformat(),
    }

    # Heartbeat disagrees with the recorded queue name: log it, then trust
    # the newest heartbeat.
    if extractor['queue'] != data['queue']:
        logging.error("mismatched queue names %s != %s." % (data['queue'], extractor['queue']))
        extractor['queue'] = data['queue']
| 115 | + |
| 116 | + |
def extractors_monitor():
    """Connect to RabbitMQ and consume extractor heartbeats forever.

    Derives the management-API URL and credentials from the AMQP URI,
    declares the fanout 'extractors' exchange, binds an exclusive anonymous
    queue to it and dispatches every message to callback().  Blocks in
    start_consuming(); intended to run on the main thread.
    """
    global rabbitmq_mgmt_url, rabbitmq_mgmt_port, rabbitmq_username, rabbitmq_password

    params = pika.URLParameters(rabbitmq_uri)
    connection = pika.BlockingConnection(params)

    # Build the management API base URL; scheme follows the AMQP ssl flag.
    # (The original also initialized an unused local `rabbitmq_url` — removed.)
    if rabbitmq_mgmt_port != '':
        if params.ssl:
            rabbitmq_mgmt_url = 'https://'
        else:
            rabbitmq_mgmt_url = 'http://'
        rabbitmq_mgmt_url = "%s%s:%s/api/queues/%s/" % (rabbitmq_mgmt_url, params.host, rabbitmq_mgmt_port,
                                                        urllib.parse.quote_plus(params.virtual_host))
    rabbitmq_username = params.credentials.username
    rabbitmq_password = params.credentials.password

    # connect to channel
    channel = connection.channel()

    # create extractors exchange for fanout
    channel.exchange_declare(exchange='extractors', exchange_type='fanout', durable=True)

    # create anonymous queue (exclusive: removed when this consumer disconnects)
    result = channel.queue_declare(exclusive=True)
    channel.queue_bind(exchange='extractors', queue=result.method.queue)

    # listen for messages; no_ack because heartbeats are fire-and-forget
    channel.basic_consume(callback, queue=result.method.queue, no_ack=True)

    channel.start_consuming()
| 149 | + |
| 150 | + |
| 151 | +# ---------------------------------------------------------------------- |
| 152 | +# MAIN |
| 153 | +# ---------------------------------------------------------------------- |
if __name__ == "__main__":
    logging.basicConfig(format='%(asctime)-15s [%(threadName)-15s] %(levelname)-7s :'
                               ' %(name)s - %(message)s',
                        level=logging.INFO)
    logging.getLogger('requests.packages.urllib3.connectionpool').setLevel(logging.WARN)

    # Status web server; daemon threads die with the main thread.
    # (daemon= kwarg replaces the deprecated Thread.setDaemon().)
    thread = threading.Thread(target=http_server, daemon=True)
    thread.start()

    # Periodic queue-count refresher.
    thread = threading.Thread(target=update_counts, daemon=True)
    thread.start()

    # Heartbeat consumer blocks on the main thread.
    extractors_monitor()