2013-03-06 19:28:41 +01:00
|
|
|
import code
|
2017-10-04 01:29:53 +02:00
|
|
|
import gc
|
|
|
|
import logging
|
|
|
|
import os
|
2013-03-06 19:28:41 +01:00
|
|
|
import signal
|
2017-10-04 01:29:53 +02:00
|
|
|
import socket
|
|
|
|
import threading
|
|
|
|
import traceback
|
|
|
|
import tracemalloc
|
2016-06-04 20:38:42 +02:00
|
|
|
from types import FrameType
|
|
|
|
|
2017-10-04 01:29:53 +02:00
|
|
|
from django.conf import settings
|
|
|
|
from django.utils.timezone import now as timezone_now
|
2016-06-04 20:38:42 +02:00
|
|
|
from typing import Optional
|
|
|
|
|
2017-10-04 01:29:53 +02:00
|
|
|
# Module-level logger shared by all the debugging helpers below.
logger = logging.getLogger('zulip.debug')
|
|
|
|
|
2013-03-06 19:28:41 +01:00
|
|
|
# Interactive debugging code from
|
2020-03-27 01:32:21 +01:00
|
|
|
# https://stackoverflow.com/questions/132058/showing-the-stack-trace-from-a-running-python-application
|
2013-03-06 19:28:41 +01:00
|
|
|
# (that link also points to code for an interactive remote debugger
|
|
|
|
# setup, which we might want if we move Tornado to run in a daemon
|
|
|
|
# rather than via screen).
|
2017-11-05 11:15:10 +01:00
|
|
|
def interactive_debug(sig: int, frame: Optional[FrameType]) -> None:
    """Interrupt running process, and provide a python prompt for
    interactive debugging.

    Installed as the SIGUSR2 handler (see interactive_debug_listen).
    Per the `signal` module docs, a handler receives the signal number
    and "None or a frame object", hence the Optional annotation.  In
    practice a handler invoked in the main thread gets a real frame;
    frame.f_globals / frame.f_locals below rely on that.
    """
    d = {'_frame': frame}     # Allow access to frame object.
    if frame is not None:
        d.update(frame.f_globals)  # Unless shadowed by global
        d.update(frame.f_locals)

    message = "Signal received : entering python shell.\nTraceback:\n"
    message += ''.join(traceback.format_stack(frame))
    i = code.InteractiveConsole(d)
    i.interact(message)
|
|
|
|
|
|
|
|
# SIGUSR1 => Just print the stack
|
|
|
|
# SIGUSR2 => Print stack + open interactive debugging shell
|
2017-11-05 11:15:10 +01:00
|
|
|
def interactive_debug_listen() -> None:
    """Install the debugging signal handlers for this process:
    SIGUSR1 prints the current stack; SIGUSR2 additionally opens an
    interactive shell in the interrupted process (interactive_debug).
    """
    def print_stack_handler(sig: int, stack: FrameType) -> None:
        traceback.print_stack(stack)

    signal.signal(signal.SIGUSR1, print_stack_handler)
    signal.signal(signal.SIGUSR2, interactive_debug)
|
2017-10-04 01:29:53 +02:00
|
|
|
|
2018-03-12 00:50:54 +01:00
|
|
|
def tracemalloc_dump() -> None:
    """Write a `tracemalloc` snapshot into settings.TRACEMALLOC_DUMP_DIR
    and log a one-line summary of traced memory and process RSS.

    No-op (with a warning) if tracing was never turned on for this
    process.
    """
    pid = os.getpid()
    if not tracemalloc.is_tracing():
        logger.warning("pid {}: tracemalloc off, nothing to dump"
                       .format(pid))
        return

    # Despite our name for it, `timezone_now` always deals in UTC.
    basename = "snap.{}.{}".format(pid, timezone_now().strftime("%F-%T"))
    os.makedirs(settings.TRACEMALLOC_DUMP_DIR, exist_ok=True)
    path = os.path.join(settings.TRACEMALLOC_DUMP_DIR, basename)

    # Collect garbage first so the snapshot reflects only live objects.
    gc.collect()
    tracemalloc.take_snapshot().dump(path)

    # Field index 23 of /proc/<pid>/stat is the resident set size in
    # pages; `// 256` below converts pages to MiB (assumes 4 KiB pages).
    with open('/proc/{}/stat'.format(pid), 'rb') as f:
        procstat = f.read().split()
    rss_pages = int(procstat[23])

    traced_current, traced_peak = tracemalloc.get_traced_memory()
    mib = 1048576
    logger.info("tracemalloc dump: tracing {} MiB ({} MiB peak), using {} MiB; rss {} MiB; dumped {}"
                .format(traced_current // mib,
                        traced_peak // mib,
                        tracemalloc.get_tracemalloc_memory() // mib,
                        rss_pages // 256,
                        basename))
|
|
|
|
|
2018-03-12 00:50:54 +01:00
|
|
|
def tracemalloc_listen_sock(sock: socket.socket) -> None:
    """Block forever on `sock`, dumping a tracemalloc snapshot each time
    a datagram arrives.  Runs in a daemon thread started by
    tracemalloc_listen; the datagram's payload is ignored -- it is
    purely a trigger.
    """
    logger.debug('pid {}: tracemalloc_listen_sock started!'.format(os.getpid()))
    while True:
        # Blocks until someone pokes the socket (see the socat example
        # in maybe_tracemalloc_listen's docstring).
        sock.recv(1)
        tracemalloc_dump()
|
|
|
|
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
# PID of the process in which tracemalloc_listen has already started its
# listener thread; None until the first call.  Comparing against
# os.getpid() (rather than a bool) makes the setup re-run after fork.
listener_pid: Optional[int] = None
|
2017-10-04 01:29:53 +02:00
|
|
|
|
2018-03-12 00:50:54 +01:00
|
|
|
def tracemalloc_listen() -> None:
    """Idempotently start a daemon thread that dumps a tracemalloc
    snapshot whenever /tmp/tracemalloc.<pid> is poked with a datagram.
    """
    global listener_pid
    pid = os.getpid()
    if listener_pid == pid:
        # Already set up -- and in this process, not just its parent.
        return
    logger.debug('pid {}: tracemalloc_listen working...'.format(pid))
    listener_pid = pid

    sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
    path = "/tmp/tracemalloc.{}".format(pid)
    sock.bind(path)
    listener = threading.Thread(target=tracemalloc_listen_sock,
                                args=(sock,),
                                daemon=True)
    listener.start()
    logger.debug('pid {}: tracemalloc_listen done: {}'.format(pid, path))
|
|
|
|
|
2018-03-12 00:50:54 +01:00
|
|
|
def maybe_tracemalloc_listen() -> None:
    '''If tracemalloc tracing enabled, listen for requests to dump a snapshot.

    To trigger once this is listening:
      echo | socat -u stdin unix-sendto:/tmp/tracemalloc.$pid

    To enable in the Zulip web server: edit /etc/zulip/uwsgi.ini ,
    and add e.g. ` PYTHONTRACEMALLOC=5` to the `env=` line.
    This function is called in middleware, so the process will
    automatically start listening.

    To enable in other contexts: see upstream docs
    https://docs.python.org/3/library/tracemalloc .
    You may also have to add a call to this function somewhere.
    '''
    # PYTHONTRACEMALLOC is how CPython itself is told to trace at
    # startup; if it isn't set, tracing is off and there is nothing
    # to listen for.
    if not os.environ.get('PYTHONTRACEMALLOC'):
        return
    tracemalloc_listen()
|