from datetime import datetime, timezone
from typing import Any, Callable, Dict, List, Tuple

import orjson
from django.http import HttpRequest, HttpResponse
from django.utils.translation import ugettext as _

from zerver.decorator import webhook_view
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_error, json_success
from zerver.lib.webhooks.common import check_send_webhook_message
from zerver.models import UserProfile

ALERT_CLEAR = 'clear'
ALERT_VIOLATION = 'violations'
SNAPSHOT = 'image_url'

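
# The parser below pulls fields straight out of Librato's webhook JSON; the
# shape sketched here is inferred from the accessors in this file rather than
# from Librato's documentation:
#
#     {
#         "alert": {"id": ..., "name": ..., "runbook_url": ...},
#         "trigger_time": <epoch seconds>,
#         "conditions": [{"summary_function": ..., "type": ...,
#                         "threshold": ..., "duration": ...}, ...],
#         "violations": {"test-source": [{"metric": ..., "recorded_at": <epoch seconds>}, ...]},
#     }
#
# Snapshot messages instead carry an "attachments" list whose entries have
# "author_name", "image_url" and "title".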
class LibratoWebhookParser:
    ALERT_URL_TEMPLATE = "https://metrics.librato.com/alerts#/{alert_id}"

    def __init__(self, payload: Dict[str, Any], attachments: List[Dict[str, Any]]) -> None:
        self.payload = payload
        self.attachments = attachments

    def generate_alert_url(self, alert_id: int) -> str:
        return self.ALERT_URL_TEMPLATE.format(alert_id=alert_id)

    def parse_alert(self) -> Tuple[int, str, str, str]:
        alert = self.payload['alert']
        alert_id = alert['id']
        return alert_id, alert['name'], self.generate_alert_url(alert_id), alert['runbook_url']

    def parse_condition(self, condition: Dict[str, Any]) -> Tuple[str, str, str, str]:
        summary_function = condition['summary_function']
        threshold = condition.get('threshold', '')
        condition_type = condition['type']
        duration = condition.get('duration', '')
        return summary_function, threshold, condition_type, duration

    def parse_violation(self, violation: Dict[str, Any]) -> Tuple[str, str]:
        metric_name = violation['metric']
        recorded_at = datetime.fromtimestamp((violation['recorded_at']),
                                              tz=timezone.utc).strftime('%Y-%m-%d %H:%M:%S')
        return metric_name, recorded_at

    def parse_conditions(self) -> List[Dict[str, Any]]:
        conditions = self.payload['conditions']
        return conditions

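    # The payload groups violations by source name; only the group under the
    # hard-coded 'test-source' key is read here, which looks like a fixture
    # artifact rather than something Librato guarantees to send.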
    def parse_violations(self) -> List[Dict[str, Any]]:
        violations = self.payload['violations']['test-source']
        return violations

    def parse_snapshot(self, snapshot: Dict[str, Any]) -> Tuple[str, str, str]:
        author_name, image_url, title = snapshot['author_name'], snapshot['image_url'], snapshot['title']
        return author_name, image_url, title


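# The handler extends the parser with one rendering method per message type;
# find_handle_method() picks the method based on which key ('clear',
# 'violations', or an attachment's 'image_url') is present.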
class LibratoWebhookHandler(LibratoWebhookParser):
    def __init__(self, payload: Dict[str, Any], attachments: List[Dict[str, Any]]) -> None:
        super().__init__(payload, attachments)
        self.payload_available_types = {
            ALERT_CLEAR: self.handle_alert_clear_message,
            ALERT_VIOLATION: self.handle_alert_violation_message,
        }

        self.attachments_available_types = {
            SNAPSHOT: self.handle_snapshots,
        }

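    # Alert keys in the payload take precedence over snapshot attachments; an
    # unrecognized message raises, and the view below turns that into an error
    # response.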
    def find_handle_method(self) -> Callable[[], str]:
        for available_type in self.payload_available_types:
            if self.payload.get(available_type):
                return self.payload_available_types[available_type]
        for available_type in self.attachments_available_types:
            if self.attachments[0].get(available_type):
                return self.attachments_available_types[available_type]
        raise Exception("Unexpected message type")

    def handle(self) -> str:
        return self.find_handle_method()()

    def generate_topic(self) -> str:
        if self.attachments:
            return "Snapshots"
        topic_template = "Alert {alert_name}"
        alert_id, alert_name, alert_url, alert_runbook_url = self.parse_alert()
        return topic_template.format(alert_name=alert_name)

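    # Librato sends epoch-second timestamps; they are formatted here as UTC
    # strings for the message body.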
    def handle_alert_clear_message(self) -> str:
        alert_clear_template = "Alert [{alert_name}]({alert_url}) has cleared at {trigger_time} UTC!"
        trigger_time = datetime.fromtimestamp((self.payload['trigger_time']),
                                               tz=timezone.utc).strftime('%Y-%m-%d %H:%M:%S')
        alert_id, alert_name, alert_url, alert_runbook_url = self.parse_alert()
        content = alert_clear_template.format(alert_name=alert_name,
                                              alert_url=alert_url,
                                              trigger_time=trigger_time)
        return content

    def handle_snapshots(self) -> str:
        content = ''
        for attachment in self.attachments:
            content += self.handle_snapshot(attachment)
        return content

    def handle_snapshot(self, snapshot: Dict[str, Any]) -> str:
        snapshot_template = "**{author_name}** sent a [snapshot]({image_url}) of [metric]({title})."
        author_name, image_url, title = self.parse_snapshot(snapshot)
        content = snapshot_template.format(author_name=author_name, image_url=image_url, title=title)
        return content

    def handle_alert_violation_message(self) -> str:
        alert_violation_template = "Alert [{alert_name}]({alert_url}) has triggered! "
        alert_id, alert_name, alert_url, alert_runbook_url = self.parse_alert()
        content = alert_violation_template.format(alert_name=alert_name, alert_url=alert_url)
        if alert_runbook_url:
            alert_runbook_template = "[Reaction steps]({alert_runbook_url}):"
            content += alert_runbook_template.format(alert_runbook_url=alert_runbook_url)
        content += self.generate_conditions_and_violations()
        return content

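    # Conditions and violations are assumed to line up pairwise; each pair is
    # rendered as one bullet of the alert message.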
    def generate_conditions_and_violations(self) -> str:
        conditions = self.parse_conditions()
        violations = self.parse_violations()
        content = ""
        for condition, violation in zip(conditions, violations):
            content += self.generate_violated_metric_condition(violation, condition)
        return content

    def generate_violated_metric_condition(self, violation: Dict[str, Any],
                                           condition: Dict[str, Any]) -> str:
        summary_function, threshold, condition_type, duration = self.parse_condition(condition)
        metric_name, recorded_at = self.parse_violation(violation)
        metric_condition_template = "\n * Metric `{metric_name}`, {summary_function} was {condition_type} {threshold}"
        content = metric_condition_template.format(
            metric_name=metric_name, summary_function=summary_function, condition_type=condition_type,
            threshold=threshold)
        if duration:
            content += f" by {duration}s"
        content += f", recorded at {recorded_at} UTC."
        return content


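# The request body is read twice: REQ parses it into `payload`, and it is
# parsed again below just to extract `attachments`, which Librato sends for
# snapshot messages.  Failures while rendering the message are returned to the
# caller as a JSON error rather than a 500.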
@webhook_view('Librato')
@has_request_variables
def api_librato_webhook(request: HttpRequest, user_profile: UserProfile,
                        payload: Dict[str, Any] = REQ(converter=orjson.loads, default={})) -> HttpResponse:
    try:
        attachments = orjson.loads(request.body).get('attachments', [])
    except orjson.JSONDecodeError:
        attachments = []

    if not attachments and not payload:
        return json_error(_("Malformed JSON input"))

    message_handler = LibratoWebhookHandler(payload, attachments)
    topic = message_handler.generate_topic()

    try:
        content = message_handler.handle()
    except Exception as e:
        return json_error(str(e))

    check_send_webhook_message(request, user_profile, topic, content)
    return json_success()