#!/usr/bin/env python3
import argparse
import calendar
import gzip
import logging
import os
import re
import signal
import sys
from datetime import date, datetime, timedelta, timezone
from enum import Enum, auto
from typing import Match, Optional, TextIO

ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(ZULIP_PATH)
from scripts.lib.setup_path import setup_path

setup_path()
os.environ["DJANGO_SETTINGS_MODULE"] = "zproject.settings"

from typing import Protocol

from django.conf import settings

from scripts.lib.zulip_tools import (
    BOLD,
    CYAN,
    ENDC,
    FAIL,
    GRAY,
    OKBLUE,
    get_config,
    get_config_file,
)


def parser() -> argparse.ArgumentParser:
    parser = argparse.ArgumentParser(description="Search logfiles, ignoring commonly-fetched URLs.")
    log_selection = parser.add_argument_group("File selection")
    log_selection_options = log_selection.add_mutually_exclusive_group()
    access_log_retention_days = int(
        get_config(get_config_file(), "application_server", "access_log_retention_days", "14")
    )
    log_selection_options.add_argument(
        "--log-files",
        "-n",
        help="Number of log files to search",
        choices=range(1, access_log_retention_days + 2),
        metavar=f"[1-{access_log_retention_days + 1}]",
        type=int,
    )
    log_selection_options.add_argument(
        "--all-logs",
        "-A",
        help="Parse all logfiles, not just most recent",
        action="store_true",
    )
    log_selection_options.add_argument(
        "--min-hours",
        "-H",
        help="Estimated minimum number of hours; includes previous log file, if estimated less than this",
        type=int,
        choices=range(24),
        default=3,
    )
    log_selection.add_argument(
        "--nginx",
        "-N",
        help="Parse from NGINX logs, not server.log",
        action="store_true",
    )

    filtering = parser.add_argument_group("Filtering")
    filtering.add_argument(
        "filter_terms",
        help="IP address, hostname, user-id, HTTP method, path, datetime prefix, or status code to search for; multiple are AND'ed together",
        nargs="+",
    )
    filtering.add_argument(
        "--all-lines",
        "-L",
        help="Show all matching lines; equivalent to -suemtpr",
        action="store_true",
    )
    filtering.add_argument("--static", "-s", help="Include static file paths", action="store_true")
    filtering.add_argument("--uploads", "-u", help="Include file upload paths", action="store_true")
    filtering.add_argument("--avatars", "-a", help="Include avatar paths", action="store_true")
    filtering.add_argument("--events", "-e", help="Include event fetch paths", action="store_true")
    filtering.add_argument("--messages", "-m", help="Include message paths", action="store_true")
    filtering.add_argument(
        "--typing",
        "-t",
        help="Include typing notification path",
        action="store_true",
    )
    filtering.add_argument("--presence", "-p", help="Include presence paths", action="store_true")
    filtering.add_argument(
        "--report", "-r", help="Include Sentry reporting paths", action="store_true"
    )
    filtering.add_argument(
        "--no-other", "-O", help="Exclude paths not explicitly included", action="store_true"
    )
    filtering.add_argument(
        "--client",
        "--user-agent",
        "-C",
        help="Only include requests whose client/user-agent contains this string",
    )

    output = parser.add_argument_group("Output")
    output.add_argument("--full-line", "-F", help="Show full matching line", action="store_true")
    output.add_argument("--timeline", "-T", help="Show start, end, and gaps", action="store_true")
    return parser
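

# Illustrative invocations (the script name here is hypothetical):
#   ./log-search 10.2.3.4 --nginx   # requests from one client IP, searching nginx logs
#   ./log-search example 4xx -L     # every 4xx response for the "example" subdomain
#   ./log-search 42 -T              # timeline of requests from user-id 42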


def maybe_gzip(logfile_name: str) -> TextIO:
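    # gzip.open in "rt" mode and plain open() both return text-mode handles,
    # so callers can iterate over lines the same way for either.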
    if logfile_name.endswith(".gz"):
        return gzip.open(logfile_name, "rt")
    return open(logfile_name)  # noqa: SIM115


NGINX_LOG_LINE_RE = re.compile(
    r"""
    (?P<ip> \S+ ) \s+
    - \s+
    (?P<user> \S+ ) \s+
    \[
       (?P<date> \d+/\w+/\d+ )
       :
       (?P<time> \d+:\d+:\d+ )
       \s+ [+-]\d+
    \] \s+
    "
       (?P<method> \S+ )
       \s+
       (?P<full_path> (?P<path> [^"?]+ ) (\?[^"]*)? )
       \s+
       (?P<http_version> HTTP/[^"]+ )
    " \s+
    (?P<code> \d+ ) \s+
    (?P<bytes> \d+ ) \s+
    "(?P<referer> [^"]* )" \s+
    "(?P<user_agent> [^"]* )" \s+
    (?P<hostname> \S+ ) \s+
    (?P<duration> \S+ )
    """,
    re.VERBOSE,
)
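
# An illustrative (hypothetical) access-log line the pattern above is written
# to match -- a single log line, wrapped here for readability:
#   10.2.3.4 - - [27/Apr/2022:21:55:02 +0200] "GET /json/messages?num_before=50 HTTP/2.0"
#     200 1423 "-" "ZulipMobile/27.192" chat.example.com 0.123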


PYTHON_LOG_LINE_RE = re.compile(
    r"""
    (?P<date> \d+-\d+-\d+ ) \s+
    (?P<time> \d+:\d+:\d+\.\d+ ) \s+
    INFO \s+  # All access log lines are INFO
    (pid:\d+ \s+)?
    \[ (?P<source> zr(:\d+)?) \] \s+
    (?P<ip>
        \d{1,3}(\.\d{1,3}){3}
      | ([a-f0-9:]+:+){1,7}[a-f0-9]*
    ) \s+
    (?P<method> [A-Z]+ ) \s+
    (?P<code> \d+ ) \s+
    (?P<duration> \S+ ) \s+  # This can be "217ms" or "1.7s"
    ( \( [^)]+ \) \s+ )*
    (?P<full_path> (?P<path> /\S* ) ) \s+
    .*  # Multiple extra things can go here
    \(
        (?P<user>
            ( (?P<user_id> \d+ ) | unauth )
            @
            (?P<hostname> \S+ )
          | zulip-server:\S+
          | scim-client:\S+
          | internal
        ) \s+ via \s+ (?P<user_agent> .* )
    \)
    """,
    re.VERBOSE,
)
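
# An illustrative (hypothetical) server.log line the pattern above is written to match:
#   2022-04-27 21:55:02.123 INFO [zr:12345] 10.2.3.4 GET 200 217ms /json/messages (8@zulip via ZulipMobile/27.192)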


class FilterType(Enum):
    HOSTNAME = auto()
    CLIENT_IP = auto()
    USER_ID = auto()
    METHOD = auto()
    PATH = auto()
    STATUS = auto()
    DATETIME = auto()


class FilterFunc(Protocol):
    def __call__(self, m: Match[str], t: str = ...) -> bool: ...
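
# Each filter lambda in parse_filters() binds its term as a default argument
# (e.g. `lambda m, t=filter_term: ...`), which snapshots the loop variable
# instead of closing over it; the `t: str = ...` parameter above mirrors that
# optional argument.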


def main() -> None:
    args = parser().parse_args()

    (filter_types, filter_funcs, substr_terms) = parse_filters(args)
    logfile_names = parse_logfile_names(args)
    if args.timeline and args.nginx:
        print("! nginx logs not suggested for timeline, due to imprecision", file=sys.stderr)

    use_color = sys.stdout.isatty()
    lowered_terms = [term.lower() for term in substr_terms]
    try:
        for logfile_name in reversed(logfile_names):
            with maybe_gzip(logfile_name) as logfile:
                for logline in logfile:
                    # As a performance optimization, just do a substring
                    # check before we parse the line fully
                    lowered = logline.lower()
                    if not all(f in lowered for f in lowered_terms):
                        continue

                    if args.nginx:
                        match = NGINX_LOG_LINE_RE.match(logline)
                    else:
                        match = PYTHON_LOG_LINE_RE.match(logline)
                    if match is None:
                        # We expect other types of loglines in the Python logfiles
                        if args.nginx:
                            print(f"! Failed to parse:\n{logline}", file=sys.stderr)
                        continue
                    if passes_filters(filter_funcs, match, args):
                        print_line(
                            match,
                            args,
                            filter_types=filter_types,
                            use_color=use_color,
                        )
    except BrokenPipeError:
        # Python flushes standard streams on exit; redirect remaining output
        # to devnull to avoid another BrokenPipeError at shutdown
        devnull = os.open(os.devnull, os.O_WRONLY)
        os.dup2(devnull, sys.stdout.fileno())
        sys.exit(1)
    except KeyboardInterrupt:
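        # Exiting with 128 + the signal number is the shell convention for
        # death by signal.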
        sys.exit(signal.SIGINT + 128)


def parse_logfile_names(args: argparse.Namespace) -> list[str]:
    if args.nginx:
        base_path = "/var/log/nginx/access.log"
    else:
        base_path = "/var/log/zulip/server.log"

    for term in args.filter_terms:
        date_term = re.match(r"2\d\d\d-\d\d-\d\d", term)
        if not date_term:
            continue
        # They're limiting to a specific day; find the right logfile
        # which is going to have any matches
        rotations = int(
            (datetime.now(tz=timezone.utc).date() - date.fromisoformat(date_term[0]))
            / timedelta(days=1)
        )
        access_log_retention_days = int(
            get_config(get_config_file(), "application_server", "access_log_retention_days", "14")
        )
        if rotations > access_log_retention_days:
            raise RuntimeError(f"Date is too old (more than {access_log_retention_days} days ago)")
        if rotations == 0:
            return [base_path]
        if rotations == 1:
            return [f"{base_path}.1"]
        else:
            return [f"{base_path}.{rotations}.gz"]

    logfile_names = [base_path]
    if args.all_logs:
        logfile_count = 15
    elif args.log_files is not None:
        logfile_count = args.log_files
    else:
        # Detect if there was a logfile rotation in the last
        # (min-hours)-ish hours, and if so include the previous
        # logfile as well.
        logfile_count = 1
        try:
            current_size = os.path.getsize(base_path)
            past_size = os.path.getsize(base_path + ".1")
            if current_size < (args.min_hours / 24.0) * past_size:
                logfile_count = 2
        except FileNotFoundError:
            pass
    for n in range(1, logfile_count):
        logname = f"{base_path}.{n}"
        if n > 1:
            logname += ".gz"
        logfile_names.append(logname)
    return logfile_names
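
# Note the rotation naming assumed above: the most recent rotation (".1") is
# still plain text, while older rotations are gzip-compressed, hence the ".gz"
# suffix only beyond the first rotation.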


month_no_to_name_lookup = {f"{k:02d}": v for k, v in enumerate(calendar.month_abbr)}
month_name_to_no_lookup = {v: k for k, v in month_no_to_name_lookup.items()}
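# e.g. month_no_to_name_lookup["04"] == "Apr" and month_name_to_no_lookup["Apr"] == "04"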


def convert_from_nginx_date(date: str) -> str:
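    # e.g. "27/Apr/2022" -> "2022-04-27"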
    day_of_month, month_abbr, year = date.split("/")
    return f"{year}-{month_name_to_no_lookup[month_abbr]}-{day_of_month}"


def convert_to_nginx_date(date: str) -> str:
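    # e.g. "2022-04-27" -> "27/Apr/2022"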
    year, month_no, day_of_month = date.split("-")
    return f"{day_of_month}/{month_no_to_name_lookup[month_no]}/{year}"


def parse_filters(
    args: argparse.Namespace,
) -> tuple[set[FilterType], list[FilterFunc], list[str]]:
    # The heuristics below are not intended to be precise -- they
    # certainly count things as "IPv4" or "IPv6" addresses that are
    # invalid.  However, we expect the input here to already be
    # reasonably well-formed.

    filter_types = set()
    filter_funcs = []
    filter_terms = []

    if args.events and not args.nginx:
        logging.warning("Adding --nginx -- /events requests do not appear in Django logs.")
        args.nginx = True

    for filter_term in args.filter_terms:
        if re.match(r"[1-5][0-9][0-9]$", filter_term):
            filter_func = lambda m, t=filter_term: m["code"] == t
            filter_type = FilterType.STATUS
            if not args.nginx and filter_term == "502":
                logging.warning("Adding --nginx -- 502's do not appear in Django logs.")
                args.nginx = True
        elif re.match(r"[1-5]xx$", filter_term):
            filter_term = filter_term[0]
            filter_func = lambda m, t=filter_term: m["code"].startswith(t)
            filter_type = FilterType.STATUS
        elif re.match(r"\d+$", filter_term):
            if args.nginx:
                raise parser().error("Cannot parse user-ids with nginx logs; try without --nginx")
            filter_func = lambda m, t=filter_term: m["user_id"] == t
            filter_type = FilterType.USER_ID
        elif re.match(r"\d{1,3}(\.\d{1,3}){3}$", filter_term):
            filter_func = lambda m, t=filter_term: m["ip"] == t
            filter_type = FilterType.CLIENT_IP
        elif re.match(r"([a-f0-9:]+:+){1,7}[a-f0-9]+$", filter_term):
            filter_func = lambda m, t=filter_term: m["ip"] == t
            filter_type = FilterType.CLIENT_IP
        elif re.match(r"DELETE|GET|HEAD|OPTIONS|PATCH|POST|PUT", filter_term):
            filter_func = lambda m, t=filter_term: m["method"].upper() == t
            filter_type = FilterType.METHOD
        elif re.match(r"(2\d\d\d-\d\d-\d\d)( \d(\d(:(\d(\d(:(\d\d?)?)?)?)?)?)?)?", filter_term):
            if args.nginx:
                datetime_parts = filter_term.split(" ")
                filter_term = ":".join(
                    [convert_to_nginx_date(datetime_parts[0]), *datetime_parts[1:]]
                )
                filter_func = lambda m, t=filter_term: f"{m['date']}:{m['time']}".startswith(t)
            else:
                filter_func = lambda m, t=filter_term: f"{m['date']} {m['time']}".startswith(t)
            filter_type = FilterType.DATETIME
        elif re.match(r"[a-z0-9]([a-z0-9-]*[a-z0-9])?$", filter_term.lower()):
            filter_term = filter_term.lower()
            if args.nginx:
                filter_func = lambda m, t=filter_term: m["hostname"].startswith(t + ".")
            else:
                filter_func = lambda m, t=filter_term: m["hostname"] == t
            filter_type = FilterType.HOSTNAME
        elif re.match(r"[a-z0-9-]+(\.[a-z0-9-]+)+$", filter_term.lower()) and re.search(
            r"[a-z-]", filter_term.lower()
        ):
            if not args.nginx:
                raise parser().error("Cannot parse full domains with Python logs; try --nginx")
            filter_term = filter_term.lower()
            filter_func = lambda m, t=filter_term: m["hostname"] == t
            filter_type = FilterType.HOSTNAME
        elif re.match(r"/\S*$", filter_term):
            filter_func = lambda m, t=filter_term: m["path"] == t
            filter_type = FilterType.PATH
            args.all_lines = True
        else:
            raise RuntimeError(
                f"Can't parse {filter_term} as an IP, hostname, user-id, HTTP method, path, or status code."
            )
        if filter_type in filter_types:
            parser().error("Supplied the same type of value more than once, which cannot match!")
        filter_types.add(filter_type)
        filter_funcs.append(filter_func)
        filter_terms.append(filter_term)

    if args.client:
        filter_funcs.append(lambda m, t=args.client: t in m["user_agent"])
        filter_terms.append(args.client)

    return (filter_types, filter_funcs, filter_terms)


def passes_filters(
    string_filters: list[FilterFunc],
    match: Match[str],
    args: argparse.Namespace,
) -> bool:
    if not all(f(match) for f in string_filters):
        return False

    if args.all_lines:
        return True

    path = match["path"]
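    # Each branch below maps a URL family to its include flag; paths matching
    # none of them are shown unless --no-other was passed.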
    if path.startswith("/static/"):
        return args.static
    elif path.startswith("/user_uploads/"):
        return args.uploads
    elif path.startswith(("/user_avatars/", "/avatar/")):
        return args.avatars
    elif re.match(r"/(json|api/v1)/events($|\?|/)", path):
        return args.events
    elif path in ("/api/v1/typing", "/json/typing"):
        return args.typing
    elif re.match(r"/(json|api/v1)/messages($|\?|/)", path):
        return args.messages
    elif path in ("/api/v1/users/me/presence", "/json/users/me/presence"):
        return args.presence
    elif path == "/error_tracing":
        return args.report
    else:
        return not args.no_other


last_match_end: Optional[datetime] = None


def print_line(
    match: Match[str],
    args: argparse.Namespace,
    filter_types: set[FilterType],
    use_color: bool,
) -> None:
    global last_match_end

    if args.full_line:
        print(match.group(0))
        return

    if args.nginx:
        date = convert_from_nginx_date(match["date"])
    else:
        date = match["date"]
    if args.all_logs or (args.log_files is not None and args.log_files > 1):
        ts = date + " " + match["time"]
    else:
        ts = match["time"]

    if match["duration"].endswith("ms"):
        duration_ms = int(match["duration"][:-2])
    else:
        duration_ms = int(float(match["duration"][:-1]) * 1000)

    code = int(match["code"])
    indicator = " "
    color = ""
    if code == 401:
        indicator = ":"
        color = CYAN
    elif code == 499:
        indicator = "-"
        color = GRAY
    elif code >= 400 and code < 499:
        indicator = ">"
        color = OKBLUE
    elif code >= 500 and code <= 599:
        indicator = "!"
        color = FAIL
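    # Legend: ":" = 401 (cyan), "-" = 499 client-closed-connection (gray),
    # ">" = other 4xx (blue), "!" = 5xx (red).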

    if use_color:
        url = f"{BOLD}{match['full_path']}"
    else:
        url = match["full_path"]
        color = ""

    if FilterType.HOSTNAME not in filter_types:
        hostname = match["hostname"]
        if hostname is None:
            hostname = "???." + settings.EXTERNAL_HOST
        elif not args.nginx:
            if hostname != "root":
                hostname += "." + settings.EXTERNAL_HOST
            elif settings.EXTERNAL_HOST == "zulipchat.com":
                hostname = "zulip.com"
            else:
                hostname = settings.EXTERNAL_HOST
        url = "https://" + hostname + url

    user_id = ""
    if not args.nginx and match["user_id"] is not None:
        user_id = match["user_id"] + "@"

    if args.timeline:
        logline_end = datetime.fromisoformat(date + " " + match["time"])
        logline_start = logline_end - timedelta(milliseconds=duration_ms)
        if last_match_end is not None:
            gap_ms = int((logline_start - last_match_end) / timedelta(milliseconds=1))
            if gap_ms > 5000:
                print()
                print(f"========== {int(gap_ms / 1000):>4} second gap ==========")
                print()
            elif gap_ms > 1000:
                print(f"============ {gap_ms:>5}ms gap ============")
            elif gap_ms > 0:
                print(f"------------ {gap_ms:>5}ms gap ------------")
            else:
                print(f"!!!!!!!!!! {abs(gap_ms):>5}ms overlap !!!!!!!!!!")
        if args.all_logs or (args.log_files is not None and args.log_files > 1):
            print(logline_start.isoformat(" ", timespec="milliseconds") + " (start)")
        else:
            print(logline_start.time().isoformat(timespec="milliseconds") + " (start)")
        last_match_end = logline_end

    parts = [
        ts,
        f"{duration_ms:>5}ms",
        f"{user_id:7}" if not args.nginx and FilterType.USER_ID not in filter_types else None,
        f"{match['ip']:39}" if FilterType.CLIENT_IP not in filter_types else None,
        indicator + match["code"],
        f"{match['method']:6}",
        url,
    ]

    print(color + " ".join(p for p in parts if p is not None) + (ENDC if use_color else ""))


if __name__ == "__main__":
    main()