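"""
Alerta logger daemon: consumes alerts from the message broker and indexes
them in Elasticsearch so they can be searched and visualised with Kibana.
"""
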
import time
import threading
import json
import urllib2

from alerta.common import config
from alerta.common import log as logging
from alerta.common.daemon import Daemon
from alerta.common.api import ApiClient
from alerta.common.amqp import Messaging, FanoutConsumer
from alerta.common.alert import AlertDocument
from alerta.common.heartbeat import Heartbeat
from alerta.common.utils import DateEncoder

__version__ = '3.0.4'

LOG = logging.getLogger(__name__)
CONF = config.CONF


class LoggerMessage(FanoutConsumer, threading.Thread):
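    """
    Consume alert messages from the fanout exchange and index each one in
    Elasticsearch. Runs in its own thread so the daemon's main loop can
    keep sending heartbeats.
    """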

    def __init__(self):

        mq = Messaging()
        self.connection = mq.connection

        FanoutConsumer.__init__(self, self.connection)
        threading.Thread.__init__(self)

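    # kombu-style callback: invoked once for each alert published to the
    # fanout exchange.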
    def on_message(self, body, message):

        LOG.debug("Received: %s", body)

        try:
            logAlert = AlertDocument.parse_alert(body)
        except ValueError:
            return

        if logAlert:
            LOG.info('%s : [%s] %s', logAlert.last_receive_id, logAlert.status, logAlert.text)

            source_host, _, source_path = logAlert.resource.partition(':')
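            # Build a Logstash-style '@' document so the fields match what
            # Kibana expects (see the es_index note in LoggerDaemon below).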
            document = {
                '@message': logAlert.text,
                '@source': logAlert.resource,
                '@source_host': source_host,
                '@source_path': source_path,
                '@tags': logAlert.tags,
                '@timestamp': logAlert.last_receive_time,
                '@type': logAlert.event_type,
                '@fields': logAlert.get_body()
            }
            LOG.debug('Index payload %s', document)

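            # The index name is derived from the alert timestamp via strftime,
            # e.g. the default 'alerta-%Y.%m.%d' gives one index per day.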
            index_url = "http://%s:%s/%s/%s" % (CONF.es_host, CONF.es_port,
                                                logAlert.last_receive_time.strftime(CONF.es_index),
                                                logAlert.event_type)
            LOG.debug('Index URL: %s', index_url)
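            # Passing a data argument makes urllib2 issue an HTTP POST, so
            # Elasticsearch auto-assigns the document id returned as '_id'.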
            try:
                response = urllib2.urlopen(index_url, json.dumps(document, cls=DateEncoder)).read()
            except Exception as e:
                LOG.error('%s : Alert indexing to %s failed - %s', logAlert.last_receive_id, index_url, e)
                return

            try:
                es_id = json.loads(response)['_id']
                LOG.info('%s : Alert indexed at %s/%s', logAlert.last_receive_id, index_url, es_id)
            except Exception as e:
                LOG.error('%s : Could not parse elasticsearch response: %s', logAlert.last_receive_id, e)
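
        # Acknowledge the message; note that the early returns above skip
        # this, leaving unparseable or unindexed messages unacknowledged.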
        message.ack()


class LoggerDaemon(Daemon):
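    """
    Daemon that runs the LoggerMessage consumer thread and sends a regular
    heartbeat to the Alerta API.
    """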

    logger_opts = {
        'es_host': 'localhost',
        'es_port': 9200,
        'es_index': 'alerta-%Y.%m.%d',  # NB. Kibana config must match this index
    }

    def __init__(self, prog, **kwargs):

        config.register_opts(LoggerDaemon.logger_opts)

        Daemon.__init__(self, prog, kwargs)

    def run(self):

        logger = LoggerMessage()
        logger.start()

        self.api = ApiClient()

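        # Heartbeat loop: lets the Alerta server detect a dead logger daemon.
        # The interval comes from the common 'loop_every' config option.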
        try:
            while True:
                LOG.debug('Send heartbeat...')
                heartbeat = Heartbeat(tags=[__version__])
                try:
                    self.api.send(heartbeat)
                except Exception as e:
                    LOG.warning('Failed to send heartbeat: %s', e)
                time.sleep(CONF.loop_every)
        except (KeyboardInterrupt, SystemExit):
            logger.should_stop = True