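"""Nagios-style health check for Zulip's RabbitMQ queue workers.

Compares the queue depths reported by `rabbitmqctl list_queues` with the
statistics files the queue workers publish, estimates how long each
backlog would take to drain, and records an aggregate
OK/WARNING/CRITICAL/UNKNOWN result for Nagios.
"""
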
import json
import os
import re
import subprocess
import sys
import time
from collections import defaultdict
from typing import Any

ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

sys.path.append(ZULIP_PATH)

from scripts.lib.zulip_tools import atomic_nagios_write, get_config, get_config_file
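
# Queues whose workers publish statistics files; these get the full
# backlog-drain analysis in analyze_queue_stats() below.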
normal_queues = [
    "deferred_work",
    "digest_emails",
    "email_mirror",
    "email_senders",
    "embed_links",
    "embedded_bots",
    "missedmessage_emails",
    "missedmessage_mobile_notifications",
    "outgoing_webhooks",
    "thumbnail",
    "user_activity",
    "user_activity_interval",
]
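
# Number of shards for the mobile notifications worker, from the
# [application_server] mobile_notification_shards setting (default "1").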
mobile_notification_shards = int(
    get_config(get_config_file(), "application_server", "mobile_notification_shards", "1")
)
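
# Standard Nagios plugin exit codes.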
OK = 0
WARNING = 1
CRITICAL = 2
UNKNOWN = 3

states = {
    0: "OK",
    1: "WARNING",
    2: "CRITICAL",
3: "UNKNOWN",
|
2020-03-21 13:10:22 +01:00
|
|
|
}
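
# Thresholds, in seconds, on the estimated time needed to drain a
# queue's backlog: above MAX_SECONDS_TO_CLEAR the check reports
# WARNING, above CRITICAL_SECONDS_TO_CLEAR it reports CRITICAL.
# Queues not listed explicitly use the defaultdict fallbacks (30s/60s).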
MAX_SECONDS_TO_CLEAR: defaultdict[str, int] = defaultdict(
    lambda: 30,
    deferred_work=600,
    digest_emails=1200,
    missedmessage_mobile_notifications=120,
    embed_links=60,
)
CRITICAL_SECONDS_TO_CLEAR: defaultdict[str, int] = defaultdict(
    lambda: 60,
    deferred_work=900,
    missedmessage_mobile_notifications=180,
    digest_emails=1800,
    embed_links=90,
)
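

# Each worker's stats file is JSON; this check relies on two keys:
# "update_time" (when the file was last written) and
# "recent_average_consume_time" (average seconds per recent event).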
def analyze_queue_stats(
    queue_name: str, stats: dict[str, Any], queue_count_rabbitmqctl: int
) -> dict[str, Any]:
    now = int(time.time())
    if stats == {}:
        return dict(status=UNKNOWN, name=queue_name, message="invalid or no stats data")

    if now - stats["update_time"] > 180 and queue_count_rabbitmqctl > 10:
        # Queue isn't updating the stats file and has some events in
        # the backlog; it's likely stuck.
        #
        # TODO: There's an unlikely race condition here - if the queue
        # was fully emptied and was idle due to no new events coming
        # for over 180 seconds, suddenly gets a burst of events and
        # this code runs exactly in the very small time window between
        # those events popping up and the queue beginning to process
        # the first one (which will refresh the stats file at the very
        # start), we'll incorrectly return the CRITICAL status. The
        # chance of that happening should be negligible because the
        # queue worker should wake up immediately and log statistics
        # before starting to process the first event.
        return dict(
            status=CRITICAL,
            name=queue_name,
            message="queue appears to be stuck, last update {}, queue size {}".format(
                stats["update_time"], queue_count_rabbitmqctl
            ),
        )

    current_size = queue_count_rabbitmqctl
    average_consume_time = stats["recent_average_consume_time"]
    if average_consume_time is None:
        # Queue just started; we can't effectively estimate anything.
        #
        # If the queue is stuck in this state and not processing
        # anything, eventually the `update_time` rule above will fire.
        return dict(status=OK, name=queue_name, message="")
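
    # Estimate drain time as (queue depth) x (recent seconds per event).
    # For example, 300 queued events at 0.5s each is an estimated 150s,
    # which for missedmessage_mobile_notifications exceeds the 120s
    # WARNING threshold but not the 180s CRITICAL one.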
    expected_time_to_clear_backlog = current_size * average_consume_time
    if expected_time_to_clear_backlog > MAX_SECONDS_TO_CLEAR[queue_name]:
        if expected_time_to_clear_backlog > CRITICAL_SECONDS_TO_CLEAR[queue_name]:
            status = CRITICAL
        else:
            status = WARNING

        return dict(
            status=status,
            name=queue_name,
            message=f"clearing the backlog will take too long: {expected_time_to_clear_backlog}s, size: {current_size}",
        )

    return dict(status=OK, name=queue_name, message="")
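

# Default message-count thresholds for queues whose workers don't
# publish stats files; see check_other_queues() below.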
WARN_COUNT_THRESHOLD_DEFAULT = 10
CRITICAL_COUNT_THRESHOLD_DEFAULT = 50


def check_other_queues(queue_counts_dict: dict[str, int]) -> list[dict[str, Any]]:
    """Do a simple queue size check for queues whose workers don't publish stats files."""

    results = []
    for queue, count in queue_counts_dict.items():
        if queue in normal_queues:
            continue

        if count > CRITICAL_COUNT_THRESHOLD_DEFAULT:
            results.append(dict(status=CRITICAL, name=queue, message=f"count critical: {count}"))
        elif count > WARN_COUNT_THRESHOLD_DEFAULT:
            results.append(dict(status=WARNING, name=queue, message=f"count warning: {count}"))
        else:
            results.append(dict(status=OK, name=queue, message=""))

    return results


def check_rabbitmq_queues() -> None:
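    # `rabbitmqctl list_queues name messages consumers` prints
    # tab-separated rows, e.g. "missedmessage_emails\t5\t1".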
    pattern = re.compile(r"(\w+)\t(\d+)\t(\d+)")
    if "USER" in os.environ and os.environ["USER"] not in ["root", "rabbitmq"]:
        print("This script must be run as the root or rabbitmq user")

    list_queues_output = subprocess.check_output(
        ["/usr/sbin/rabbitmqctl", "list_queues", "name", "messages", "consumers"],
        text=True,
    )
    queue_counts_rabbitmqctl = {}
    queues_with_consumers = []
    for line in list_queues_output.split("\n"):
        line = line.strip()
        m = pattern.match(line)
        if m:
            queue = m.group(1)
            count = int(m.group(2))
            consumers = int(m.group(3))
            queue_counts_rabbitmqctl[queue] = count
            if consumers > 0 and not queue.startswith("notify_tornado"):
                queues_with_consumers.append(queue)
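
    # Ask Django for QUEUE_STATS_DIR, the directory where each queue
    # worker writes its "<queue>.stats" JSON file.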
    queue_stats_dir = subprocess.check_output(
        [os.path.join(ZULIP_PATH, "scripts/get-django-setting"), "QUEUE_STATS_DIR"],
        text=True,
    ).strip()
    queue_stats: dict[str, dict[str, Any]] = {}

    check_queues = normal_queues
    if mobile_notification_shards > 1:
        # For sharded queue workers, where there's a separate queue
        # for each shard, we need to make sure none of those are
        # backlogged.
        check_queues += [
            f"missedmessage_mobile_notifications_shard{d}"
            for d in range(1, mobile_notification_shards + 1)
        ]
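
    # When sharding is enabled, the += above also extends the
    # module-level normal_queues list (check_queues aliases it), so
    # check_other_queues() below skips the sharded names rather than
    # double-reporting them.  Only queues with at least one active
    # consumer are analyzed here.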
    queues_to_check = set(check_queues).intersection(set(queues_with_consumers))
    for queue in queues_to_check:
        fn = queue + ".stats"
        file_path = os.path.join(queue_stats_dir, fn)
        if not os.path.exists(file_path):
            queue_stats[queue] = {}
            continue
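
        # Treat an unparsable stats file like a missing one; empty stats
        # make analyze_queue_stats() report UNKNOWN.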
        with open(file_path) as f:
            try:
                queue_stats[queue] = json.load(f)
            except json.decoder.JSONDecodeError:
                queue_stats[queue] = {}

    results = []
    for queue_name, stats in queue_stats.items():
        results.append(analyze_queue_stats(queue_name, stats, queue_counts_rabbitmqctl[queue_name]))

    results.extend(check_other_queues(queue_counts_rabbitmqctl))
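
    # The overall state is the worst across all queues (numerically,
    # OK=0 < WARNING=1 < CRITICAL=2 < UNKNOWN=3).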
    status = max(result["status"] for result in results)

    if status > 0:
        queue_error_template = "queue {} problem: {}:{}"
        error_message = "; ".join(
            queue_error_template.format(result["name"], states[result["status"]], result["message"])
            for result in results
            if result["status"] > 0
        )
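        # atomic_nagios_write records the result where Nagios reads it;
        # its return value is used as this process's exit status.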
        sys.exit(
            atomic_nagios_write(
                "check-rabbitmq-results",
                "critical" if status == CRITICAL else "warning",
                error_message,
            )
        )
    else:
        atomic_nagios_write("check-rabbitmq-results", "ok", "queues normal")
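

if __name__ == "__main__":
    # The module itself never calls check_rabbitmq_queues(); this guard
    # makes direct invocation work as well as importing it.
    check_rabbitmq_queues()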