Remove statsd support.

Signed-off-by: Anders Kaseorg <anders@zulip.com>
Anders Kaseorg authored on 2023-04-25 18:09:19 -07:00; committed by Tim Abbott
parent ca6b0d8c5a
commit 9db3451333
26 changed files with 13 additions and 318 deletions

View File

@@ -65,7 +65,6 @@ module = [
     "django_cte.*",
     "django_otp.*",
     "django_scim.*",
-    "django_statsd.*",
     "DNS.*",
     "fakeldap.*",
     "gcm.*",

View File

@@ -45,9 +45,6 @@ django-auth-ldap
 
 # Django extension providing bitfield support
 django-bitfield
 
-# Django extension for sending data to statsd
-django-statsd-mozilla
-
 # Needed for Android push notifications
 python-gcm

View File

@@ -559,10 +559,6 @@ django-scim2==0.19.0 \
     --hash=sha256:0571c9798e3af1a199bc05d6726185675bdd960627c3f4406a0c125e3e12e025 \
     --hash=sha256:531e8ae3d7dadb225e1b5addaa8c685f307de7eda5f23796ad0f4c7a96e06479
     # via -r requirements/common.in
-django-statsd-mozilla==0.4.0 \
-    --hash=sha256:0d87cb63de8107279cbb748caad9aa74c6a44e7e96ccc5dbf07b89f77285a4b8 \
-    --hash=sha256:81084f3d426f5184f0a0f1dbfe035cc26b66f041d2184559d916a228d856f0d3
-    # via -r requirements/common.in
 django-stubs==1.16.0 \
     --hash=sha256:1bd96207576cd220221a0e615f0259f13d453d515a80f576c1246e0fb547f561 \
     --hash=sha256:c95f948e2bfc565f3147e969ff361ef033841a0b8a51cac974a6cc6d0486732c
@@ -2208,10 +2204,6 @@ stack-data==0.6.2 \
     --hash=sha256:32d2dd0376772d01b6cb9fc996f3c8b57a357089dec328ed4b6553d037eaf815 \
     --hash=sha256:cbb2a53eb64e5785878201a97ed7c7b94883f48b87bfb0bbe8b623c74679e4a8
     # via ipython
-statsd==4.0.0 \
-    --hash=sha256:52cd0e3843c07149538948ec803414e3ee356c2da7fa3ace656c1e216a00f70c \
-    --hash=sha256:e767eb2b8c705ca4c0a55b96fe47f80ce15ba98c7a851dd2ad00504d29cb2e3c
-    # via django-statsd-mozilla
 stripe==5.4.0 \
     --hash=sha256:57c0da7e3b889b69ff1dbf23ac1ec5e00f665cfba069fdf0f328b83ddf4225df \
     --hash=sha256:72bda7bf9be7528e1b97a5bbacb0716cdf6a0c9597b13fdbfa364cec3c130713

View File

@@ -393,10 +393,6 @@ django-scim2==0.19.0 \
     --hash=sha256:0571c9798e3af1a199bc05d6726185675bdd960627c3f4406a0c125e3e12e025 \
     --hash=sha256:531e8ae3d7dadb225e1b5addaa8c685f307de7eda5f23796ad0f4c7a96e06479
     # via -r requirements/common.in
-django-statsd-mozilla==0.4.0 \
-    --hash=sha256:0d87cb63de8107279cbb748caad9aa74c6a44e7e96ccc5dbf07b89f77285a4b8 \
-    --hash=sha256:81084f3d426f5184f0a0f1dbfe035cc26b66f041d2184559d916a228d856f0d3
-    # via -r requirements/common.in
 django-stubs-ext==0.8.0 \
     --hash=sha256:9a9ba9e2808737949de96a0fce8b054f12d38e461011d77ebc074ffe8c43dfcb \
     --hash=sha256:a454d349d19c26d6c50c4c6dbc1e8af4a9cda4ce1dc4104e3dd4c0330510cc56
@@ -1518,10 +1514,6 @@ stack-data==0.6.2 \
     --hash=sha256:32d2dd0376772d01b6cb9fc996f3c8b57a357089dec328ed4b6553d037eaf815 \
     --hash=sha256:cbb2a53eb64e5785878201a97ed7c7b94883f48b87bfb0bbe8b623c74679e4a8
     # via ipython
-statsd==4.0.0 \
-    --hash=sha256:52cd0e3843c07149538948ec803414e3ee356c2da7fa3ace656c1e216a00f70c \
-    --hash=sha256:e767eb2b8c705ca4c0a55b96fe47f80ce15ba98c7a851dd2ad00504d29cb2e3c
-    # via django-statsd-mozilla
 stripe==5.4.0 \
     --hash=sha256:57c0da7e3b889b69ff1dbf23ac1ec5e00f665cfba069fdf0f328b83ddf4225df \
     --hash=sha256:72bda7bf9be7528e1b97a5bbacb0716cdf6a0c9597b13fdbfa364cec3c130713

View File

@@ -103,7 +103,6 @@ not_yet_fully_covered = [
     "zerver/lib/queue.py",
     "zerver/lib/sqlalchemy_utils.py",
     "zerver/lib/storage.py",
-    "zerver/lib/utils.py",
     "zerver/lib/zephyr.py",
     "zerver/lib/templates.py",
     "zerver/templatetags/minified_js.py",

View File

@@ -48,4 +48,4 @@ API_FEATURE_LEVEL = 177
 # historical commits sharing the same major version, in which case a
 # minor version bump suffices.
-PROVISION_VERSION = (234, 1)
+PROVISION_VERSION = (235, 0)

View File

@@ -18,7 +18,6 @@ from zerver.lib.message import (
 from zerver.lib.queue import queue_json_publish
 from zerver.lib.stream_subscription import get_subscribed_stream_recipient_ids_for_user
 from zerver.lib.topic import filter_by_topic_name_via_message
-from zerver.lib.utils import log_statsd_event
 from zerver.models import Message, Recipient, UserMessage, UserProfile
 from zerver.tornado.django_api import send_event
 
@@ -34,8 +33,6 @@ class ReadMessagesEvent:
 def do_mark_all_as_read(user_profile: UserProfile) -> int:
-    log_statsd_event("bankruptcy")
-
     # First, we clear mobile push notifications. This is safer in the
     # event that the below logic times out and we're killed.
     all_push_message_ids = (
@@ -101,8 +98,6 @@ def do_mark_all_as_read(user_profile: UserProfile) -> int:
 def do_mark_stream_messages_as_read(
     user_profile: UserProfile, stream_recipient_id: int, topic_name: Optional[str] = None
 ) -> int:
-    log_statsd_event("mark_stream_as_read")
-
     with transaction.atomic(savepoint=False):
         query = (
             UserMessage.select_for_update_query()

View File

@@ -5,7 +5,6 @@ from django.conf import settings
 from django.db import transaction
 
 from zerver.actions.user_activity import update_user_activity_interval
-from zerver.decorator import statsd_increment
 from zerver.lib.queue import queue_json_publish
 from zerver.lib.timestamp import datetime_to_timestamp
 from zerver.models import Client, UserPresence, UserProfile, active_user_ids, get_client
@@ -66,7 +65,6 @@ def consolidate_client(client: Client) -> Client:
         return client
 
 
-@statsd_increment("user_presence")
 def do_update_user_presence(
     user_profile: UserProfile,
     client: Client,

View File

@@ -1,6 +1,5 @@
 import datetime
 
-from zerver.decorator import statsd_increment
 from zerver.lib.queue import queue_json_publish
 from zerver.lib.timestamp import datetime_to_timestamp
 from zerver.models import UserActivity, UserActivityInterval, UserProfile
@@ -32,7 +31,6 @@ def do_update_user_activity_interval(
     )
 
 
-@statsd_increment("user_activity")
 def do_update_user_activity(
     user_profile_id: int, client_id: int, query: str, count: int, log_time: datetime.datetime
 ) -> None:

View File

@@ -59,7 +59,7 @@ from zerver.lib.response import json_method_not_allowed
 from zerver.lib.subdomains import get_subdomain, user_matches_subdomain
 from zerver.lib.timestamp import datetime_to_timestamp, timestamp_to_datetime
 from zerver.lib.users import is_2fa_verified
-from zerver.lib.utils import has_api_key_format, statsd
+from zerver.lib.utils import has_api_key_format
 from zerver.models import UserProfile, get_client, get_user_profile_by_api_key
 
 if TYPE_CHECKING:
@@ -942,26 +942,6 @@ def to_utc_datetime(var_name: str, timestamp: str) -> datetime.datetime:
     return timestamp_to_datetime(float(timestamp))
 
 
-def statsd_increment(
-    counter: str, val: int = 1
-) -> Callable[[Callable[ParamT, ReturnT]], Callable[ParamT, ReturnT]]:
-    """Increments a statsd counter on completion of the
-    decorated function.
-
-    Pass the name of the counter to this decorator-returning function."""
-
-    def wrapper(func: Callable[ParamT, ReturnT]) -> Callable[ParamT, ReturnT]:
-        @wraps(func)
-        def wrapped_func(*args: ParamT.args, **kwargs: ParamT.kwargs) -> ReturnT:
-            ret = func(*args, **kwargs)
-            statsd.incr(counter, val)
-            return ret
-
-        return wrapped_func
-
-    return wrapper
-
-
 def return_success_on_head_request(
     view_func: Callable[Concatenate[HttpRequest, ParamT], HttpResponse]
 ) -> Callable[Concatenate[HttpRequest, ParamT], HttpResponse]:

View File

@@ -30,7 +30,7 @@ from django.http import HttpRequest
 from django_stubs_ext import QuerySetAny
 from typing_extensions import ParamSpec
 
-from zerver.lib.utils import make_safe_digest, statsd, statsd_key
+from zerver.lib.utils import make_safe_digest
 
 if TYPE_CHECKING:
     # These modules have to be imported for type annotations but
@@ -129,7 +129,6 @@ def cache_with_key(
     keyfunc: Callable[ParamT, str],
     cache_name: Optional[str] = None,
     timeout: Optional[int] = None,
-    with_statsd_key: Optional[str] = None,
 ) -> Callable[[Callable[ParamT, ReturnT]], Callable[ParamT, ReturnT]]:
     """Decorator which applies Django caching to a function.
@@ -150,18 +149,6 @@ def cache_with_key(
                 log_invalid_cache_keys(stack_trace, [key])
                 return func(*args, **kwargs)
 
-            extra = ""
-            if cache_name == "database":
-                extra = ".dbcache"
-
-            if with_statsd_key is not None:
-                metric_key = with_statsd_key
-            else:
-                metric_key = statsd_key(key)
-
-            status = "hit" if val is not None else "miss"
-            statsd.incr(f"cache{extra}.{metric_key}.{status}")
-
             # Values are singleton tuples so that we can distinguish
             # a result of None from a missing key.
             if val is not None:

View File

@@ -20,7 +20,6 @@ from django.utils.translation import override as override_language
 from lxml.html import builder as e
 
 from confirmation.models import one_click_unsubscribe_link
-from zerver.decorator import statsd_increment
 from zerver.lib.markdown.fenced_code import FENCE_RE
 from zerver.lib.message import bulk_access_messages
 from zerver.lib.notification_data import get_mentioned_user_group_name
@@ -408,7 +407,6 @@ def include_realm_name_in_missedmessage_emails_subject(user_profile: UserProfile) -> bool:
     )
 
 
-@statsd_increment("missed_message_reminders")
 def do_send_missedmessage_events_reply_in_zulip(
     user_profile: UserProfile, missed_messages: List[Dict[str, Any]], message_count: int
 ) -> None:

View File

@@ -418,7 +418,7 @@ def has_blockquote_ancestor(element_pair: Optional[ElementPair]) -> bool:
         return has_blockquote_ancestor(element_pair.parent)
 
 
-@cache_with_key(lambda tweet_id: tweet_id, cache_name="database", with_statsd_key="tweet_data")
+@cache_with_key(lambda tweet_id: tweet_id, cache_name="database")
 def fetch_tweet_data(tweet_id: str) -> Optional[Dict[str, Any]]:
     if settings.TEST_SUITE:
         from . import testing_mocks

View File

@@ -18,7 +18,6 @@ from django.utils.timezone import now as timezone_now
 from django.utils.translation import gettext as _
 from django.utils.translation import override as override_language
 
-from zerver.decorator import statsd_increment
 from zerver.lib.avatar import absolute_avatar_url
 from zerver.lib.exceptions import JsonableError
 from zerver.lib.message import access_message, huddle_users
@@ -180,7 +179,6 @@ def modernize_apns_payload(data: Mapping[str, Any]) -> Mapping[str, Any]:
 APNS_MAX_RETRIES = 3
 
 
-@statsd_increment("apple_push_notification")
 def send_apple_push_notification(
     user_identity: UserPushIdentityCompat,
     devices: Sequence[DeviceToken],
@@ -357,7 +355,6 @@ def parse_gcm_options(options: Dict[str, Any], data: Dict[str, Any]) -> str:
     return priority  # when this grows a second option, can make it a tuple
 
 
-@statsd_increment("android_push_notification")
 def send_android_push_notification(
     user_identity: UserPushIdentityCompat,
     devices: Sequence[DeviceToken],
@@ -1036,7 +1033,6 @@ def handle_remove_push_notification(user_profile_id: int, message_ids: List[int]) -> None:
     ).update(flags=F("flags").bitand(~UserMessage.flags.active_mobile_push_notification))
 
 
-@statsd_increment("push_notifications")
 def handle_push_notification(user_profile_id: int, missed_message: Dict[str, Any]) -> None:
     """
     missed_message is the event received by the

View File

@@ -18,7 +18,7 @@ from pika.channel import Channel
 from pika.spec import Basic
 from tornado import ioloop
 
-from zerver.lib.utils import assert_is_not_none, statsd
+from zerver.lib.utils import assert_is_not_none
 
 MAX_REQUEST_RETRIES = 3
 ChannelT = TypeVar("ChannelT", Channel, BlockingChannel)
@@ -128,8 +128,6 @@ class QueueClient(Generic[ChannelT], metaclass=ABCMeta):
                 body=body,
             )
 
-            statsd.incr(f"rabbitmq.publish.{queue_name}")
-
         self.ensure_queue(queue_name, do_publish)
 
     def json_publish(self, queue_name: str, body: Mapping[str, Any]) -> None:

View File

@@ -13,7 +13,6 @@ from django.http import HttpRequest
 from zerver.lib.cache import cache_with_key
 from zerver.lib.exceptions import RateLimitedError
 from zerver.lib.redis_utils import get_redis_client
-from zerver.lib.utils import statsd
 from zerver.models import UserProfile
 
 # Implement a rate-limiting scheme inspired by the one described here, but heavily modified
@@ -322,7 +321,6 @@ class TornadoInMemoryRateLimiterBackend(RateLimiterBackend):
             ratelimited, time_till_free = cls.need_to_limit(entity_key, time_window, max_count)
 
             if ratelimited:
-                statsd.incr(f"ratelimiter.limited.{entity_key}")
                 break
 
         return ratelimited, time_till_free
@@ -491,10 +489,7 @@ class RedisRateLimiterBackend(RateLimiterBackend):
     ) -> Tuple[bool, float]:
         ratelimited, time = cls.is_ratelimited(entity_key, rules)
 
-        if ratelimited:
-            statsd.incr(f"ratelimiter.limited.{entity_key}")
-
-        else:
+        if not ratelimited:
             try:
                 cls.incr_ratelimit(entity_key, max_api_calls, max_api_window)
             except RateLimiterLockingError:

View File

@@ -81,7 +81,7 @@ def catch_network_errors(func: Callable[..., Any]) -> Callable[..., Any]:
 
 
 @catch_network_errors
-@cache_with_key(preview_url_cache_key, with_statsd_key="urlpreview_data")
+@cache_with_key(preview_url_cache_key)
 def get_link_embed_data(
     url: str, maxwidth: int = 640, maxheight: int = 480
 ) -> Optional[UrlEmbedData]:

View File

@@ -1,9 +1,7 @@
 import hashlib
 import re
 import secrets
-from typing import TYPE_CHECKING, Any, Callable, List, Optional, TypeVar
-
-from django.conf import settings
+from typing import TYPE_CHECKING, Callable, List, Optional, TypeVar
 
 if TYPE_CHECKING:
     from hashlib import _Hash
@@ -11,53 +9,6 @@ if TYPE_CHECKING:
 
 T = TypeVar("T")
 
-
-def statsd_key(val: str, clean_periods: bool = False) -> str:
-    if ":" in val:
-        val = val.split(":")[0]
-    val = val.replace("-", "_")
-    if clean_periods:
-        val = val.replace(".", "_")
-
-    return val
-
-
-class StatsDWrapper:
-    """Transparently either submit metrics to statsd
-    or do nothing without erroring out"""
-
-    # Backported support for gauge deltas
-    # as our statsd server supports them but supporting
-    # pystatsd is not released yet
-    def _our_gauge(self, stat: str, value: float, rate: float = 1, delta: bool = False) -> None:
-        """Set a gauge value."""
-        from django_statsd.clients import statsd
-
-        if delta:
-            value_str = f"{value:+g}|g"
-        else:
-            value_str = f"{value:g}|g"
-        statsd._send(stat, value_str, rate)
-
-    def __getattr__(self, name: str) -> Any:
-        # Hand off to statsd if we have it enabled
-        # otherwise do nothing
-        if name in ["timer", "timing", "incr", "decr", "gauge"]:
-            if settings.STATSD_HOST != "":
-                from django_statsd.clients import statsd
-
-                if name == "gauge":
-                    return self._our_gauge
-                else:
-                    return getattr(statsd, name)
-            else:
-                return lambda *args, **kwargs: None
-        raise AttributeError
-
-
-statsd = StatsDWrapper()
-
-
 def make_safe_digest(string: str, hash_func: "Callable[[bytes], _Hash]" = hashlib.sha1) -> str:
     """
     return a hex digest of `string`.
@@ -67,21 +18,6 @@ def make_safe_digest(string: str, hash_func: "Callable[[bytes], _Hash]" = hashlib.sha1) -> str:
     return hash_func(string.encode()).hexdigest()
 
 
-def log_statsd_event(name: str) -> None:
-    """
-    Sends a single event to statsd with the desired name and the current timestamp
-
-    This can be used to provide vertical lines in generated graphs,
-    for example when doing a prod deploy, bankruptcy request, or
-    other one-off events
-
-    Note that to draw this event as a vertical line in graphite
-    you can use the drawAsInfinite() command
-    """
-    event_name = f"events.{name}"
-    statsd.incr(event_name)
-
-
 def generate_api_key() -> str:
     api_key = ""
     while len(api_key) < 32:

View File

@@ -42,7 +42,6 @@ from zerver.lib.response import (
 )
 from zerver.lib.subdomains import get_subdomain
 from zerver.lib.user_agent import parse_user_agent
-from zerver.lib.utils import statsd
 from zerver.models import Realm, flush_per_request_caches, get_realm
 
 ParamT = ParamSpec("ParamT")
@@ -120,29 +119,6 @@ def is_slow_query(time_delta: float, path: str) -> bool:
     return True
 
 
-statsd_blacklisted_requests = [
-    "do_confirm",
-    "signup_send_confirm",
-    "new_realm_send_confirm",
-    "eventslast_event_id",
-    "webreq.content",
-    "avatar",
-    "user_uploads",
-    "password.reset",
-    "static",
-    "json.bots",
-    "json.users",
-    "json.streams",
-    "accounts.unsubscribe",
-    "apple-touch-icon",
-    "emoji",
-    "json.bots",
-    "upload_file",
-    "realm_activity",
-    "user_activity",
-]
-
-
 def write_log_line(
     log_data: MutableMapping[str, Any],
     path: str,
@@ -159,24 +135,6 @@ def write_log_line(
     if error_content is not None:
         error_content_iter = (error_content,)
 
-    if settings.STATSD_HOST != "":
-        # For statsd timer name
-        if path == "/":
-            statsd_path = "webreq"
-        else:
-            statsd_path = "webreq.{}".format(path[1:].replace("/", "."))
-            # Remove non-ascii chars from path (there should be none; if there are, it's
-            # because someone manually entered a nonexistent path), as UTF-8 chars make
-            # statsd sad when it sends the key name over the socket
-            statsd_path = statsd_path.encode("ascii", errors="ignore").decode("ascii")
-        # TODO: This could probably be optimized to use a regular expression rather than a loop.
-        suppress_statsd = any(
-            blacklisted in statsd_path for blacklisted in statsd_blacklisted_requests
-        )
-    else:
-        suppress_statsd = True
-        statsd_path = ""
-
     time_delta = -1
     # A time duration of -1 means the StartLogRequests middleware
     # didn't run for some reason
@@ -210,10 +168,6 @@ def write_log_line(
             f" (mem: {format_timedelta(remote_cache_time_delta)}/{remote_cache_count_delta})"
         )
 
-        if not suppress_statsd:
-            statsd.timing(f"{statsd_path}.remote_cache.time", timedelta_ms(remote_cache_time_delta))
-            statsd.incr(f"{statsd_path}.remote_cache.querycount", remote_cache_count_delta)
-
     startup_output = ""
     if "startup_time_delta" in log_data and log_data["startup_time_delta"] > 0.005:
         startup_output = " (+start: {})".format(format_timedelta(log_data["startup_time_delta"]))
@@ -236,10 +190,6 @@ def write_log_line(
             f" (md: {format_timedelta(markdown_time_delta)}/{markdown_count_delta})"
         )
 
-        if not suppress_statsd:
-            statsd.timing(f"{statsd_path}.markdown.time", timedelta_ms(markdown_time_delta))
-            statsd.incr(f"{statsd_path}.markdown.count", markdown_count_delta)
-
     # Get the amount of time spent doing database queries
     db_time_output = ""
     queries = connection.connection.queries if connection.connection is not None else []
@@ -247,12 +197,6 @@ def write_log_line(
         query_time = sum(float(query.get("time", 0)) for query in queries)
         db_time_output = f" (db: {format_timedelta(query_time)}/{len(queries)}q)"
 
-        if not suppress_statsd:
-            # Log ms, db ms, and num queries to statsd
-            statsd.timing(f"{statsd_path}.dbtime", timedelta_ms(query_time))
-            statsd.incr(f"{statsd_path}.dbq", len(queries))
-            statsd.timing(f"{statsd_path}.total", timedelta_ms(time_delta))
-
     if "extra" in log_data:
         extra_request_data = " {}".format(log_data["extra"])
     else:

View File

@@ -1,23 +1,4 @@
-from typing import Callable, ContextManager, List, Tuple
-from unittest import mock
-
 from zerver.lib.test_classes import ZulipTestCase
-from zerver.lib.utils import statsd
-
-
-class StatsMock:
-    def __init__(self, settings: Callable[..., ContextManager[None]]) -> None:
-        self.settings = settings
-        self.real_impl = statsd
-        self.func_calls: List[Tuple[str, Tuple[object, ...]]] = []
-
-    def __getattr__(self, name: str) -> Callable[..., None]:
-        def f(*args: object) -> None:
-            with self.settings(STATSD_HOST=""):
-                getattr(self.real_impl, name)(*args)
-            self.func_calls.append((name, args))
-
-        return f
 
 
 class TestReport(ZulipTestCase):
@@ -32,20 +13,9 @@ class TestReport(ZulipTestCase):
             rendered_content_disparity="true",
         )
 
-        stats_mock = StatsMock(self.settings)
-        with mock.patch("zerver.views.report.statsd", wraps=stats_mock):
-            result = self.client_post("/json/report/send_times", params)
+        result = self.client_post("/json/report/send_times", params)
         self.assert_json_success(result)
 
-        expected_calls = [
-            ("timing", ("endtoend.send_time.zulip", 5)),
-            ("timing", ("endtoend.receive_time.zulip", 6)),
-            ("timing", ("endtoend.displayed_time.zulip", 7)),
-            ("incr", ("locally_echoed",)),
-            ("incr", ("render_disparity",)),
-        ]
-        self.assertEqual(stats_mock.func_calls, expected_calls)
-
     def test_narrow_time(self) -> None:
         self.login("hamlet")
@@ -55,18 +25,9 @@ class TestReport(ZulipTestCase):
             network=7,
         )
 
-        stats_mock = StatsMock(self.settings)
-        with mock.patch("zerver.views.report.statsd", wraps=stats_mock):
-            result = self.client_post("/json/report/narrow_times", params)
+        result = self.client_post("/json/report/narrow_times", params)
         self.assert_json_success(result)
 
-        expected_calls = [
-            ("timing", ("narrow.initial_core.zulip", 5)),
-            ("timing", ("narrow.initial_free.zulip", 6)),
-            ("timing", ("narrow.network.zulip", 7)),
-        ]
-        self.assertEqual(stats_mock.func_calls, expected_calls)
-
     def test_anonymous_user_narrow_time(self) -> None:
         params = dict(
             initial_core=5,
@@ -74,18 +35,9 @@ class TestReport(ZulipTestCase):
             network=7,
         )
 
-        stats_mock = StatsMock(self.settings)
-        with mock.patch("zerver.views.report.statsd", wraps=stats_mock):
-            result = self.client_post("/json/report/narrow_times", params)
+        result = self.client_post("/json/report/narrow_times", params)
        self.assert_json_success(result)
 
-        expected_calls = [
-            ("timing", ("narrow.initial_core.zulip", 5)),
-            ("timing", ("narrow.initial_free.zulip", 6)),
-            ("timing", ("narrow.network.zulip", 7)),
-        ]
-        self.assertEqual(stats_mock.func_calls, expected_calls)
-
     def test_unnarrow_time(self) -> None:
         self.login("hamlet")
@@ -94,34 +46,18 @@ class TestReport(ZulipTestCase):
             initial_free=6,
         )
 
-        stats_mock = StatsMock(self.settings)
-        with mock.patch("zerver.views.report.statsd", wraps=stats_mock):
-            result = self.client_post("/json/report/unnarrow_times", params)
+        result = self.client_post("/json/report/unnarrow_times", params)
         self.assert_json_success(result)
 
-        expected_calls = [
-            ("timing", ("unnarrow.initial_core.zulip", 5)),
-            ("timing", ("unnarrow.initial_free.zulip", 6)),
-        ]
-        self.assertEqual(stats_mock.func_calls, expected_calls)
-
     def test_anonymous_user_unnarrow_time(self) -> None:
         params = dict(
             initial_core=5,
             initial_free=6,
         )
 
-        stats_mock = StatsMock(self.settings)
-        with mock.patch("zerver.views.report.statsd", wraps=stats_mock):
-            result = self.client_post("/json/report/unnarrow_times", params)
+        result = self.client_post("/json/report/unnarrow_times", params)
         self.assert_json_success(result)
 
-        expected_calls = [
-            ("timing", ("unnarrow.initial_core.zulip", 5)),
-            ("timing", ("unnarrow.initial_free.zulip", 6)),
-        ]
-        self.assertEqual(stats_mock.func_calls, expected_calls)
-
     def test_report_csp_violations(self) -> None:
         fixture_data = self.fixture_data("csp_report.json")
         with self.assertLogs(level="WARNING") as warn_logs:

View File

@@ -42,7 +42,6 @@ from zerver.lib.message import MessageDict
 from zerver.lib.narrow import build_narrow_filter
 from zerver.lib.notification_data import UserMessageNotificationsData
 from zerver.lib.queue import queue_json_publish, retry_event
-from zerver.lib.utils import statsd
 from zerver.middleware import async_request_timer_restart
 from zerver.models import CustomProfileField
 from zerver.tornado.descriptors import clear_descriptor_by_handler_id, set_descriptor_by_handler_id
@@ -565,8 +564,6 @@ def gc_event_queues(port: int) -> None:
         len(clients),
         handler_stats_string(),
     )
-    statsd.gauge("tornado.active_queues", len(clients))
-    statsd.gauge("tornado.active_users", len(user_clients))
 
 
 def persistent_queue_filename(port: int, last: bool = False) -> str:

View File

@@ -17,7 +17,6 @@ from zerver.lib.request import RequestNotes
 from zerver.lib.streams import access_stream_by_name
 from zerver.lib.subdomains import get_subdomain
 from zerver.lib.user_counts import realm_user_count
-from zerver.lib.utils import statsd
 from zerver.models import PreregistrationUser, Realm, Stream, UserProfile
 
@@ -172,8 +171,6 @@ def home_real(request: HttpRequest) -> HttpResponse:
     update_last_reminder(user_profile)
 
-    statsd.incr("views.home")
-
     # If a user hasn't signed the current Terms of Service, send them there
     if need_accept_tos(user_profile):
         return accounts_accept_terms(request)

View File

@@ -23,7 +23,6 @@ from zerver.lib.request import REQ, RequestNotes, has_request_variables
 from zerver.lib.response import json_success
 from zerver.lib.sqlalchemy_utils import get_sqlalchemy_connection
 from zerver.lib.topic import DB_TOPIC_NAME, MATCH_TOPIC, topic_column_sa
-from zerver.lib.utils import statsd
 from zerver.lib.validator import check_bool, check_int, check_list, to_non_negative_int
 from zerver.models import UserMessage, UserProfile
 
@@ -222,8 +221,6 @@ def get_messages_backend(
         allow_edit_history=realm.allow_edit_history,
     )
 
-    statsd.incr("loaded_old_messages", len(message_list))
-
     ret = dict(
         messages=message_list,
         result="success",

View File

@@ -7,11 +7,9 @@ from django.http import HttpRequest, HttpResponse
 from django.views.decorators.csrf import csrf_exempt
 from django.views.decorators.http import require_POST
 
-from zerver.context_processors import get_valid_realm_from_request
 from zerver.decorator import human_users_only
 from zerver.lib.request import REQ, RequestNotes, has_request_variables
 from zerver.lib.response import json_success
-from zerver.lib.utils import statsd, statsd_key
 from zerver.lib.validator import (
     WildValue,
     check_bool,
@@ -46,16 +44,6 @@ def report_send_times(
         "extra"
     ] = f"[{time}ms/{received_str}ms/{displayed_str}ms/echo:{locally_echoed}/diff:{rendered_content_disparity}]"
 
-    base_key = statsd_key(user_profile.realm.string_id, clean_periods=True)
-    statsd.timing(f"endtoend.send_time.{base_key}", time)
-    if received > 0:
-        statsd.timing(f"endtoend.receive_time.{base_key}", received)
-    if displayed > 0:
-        statsd.timing(f"endtoend.displayed_time.{base_key}", displayed)
-    if locally_echoed:
-        statsd.incr("locally_echoed")
-    if rendered_content_disparity:
-        statsd.incr("render_disparity")
-
     return json_success(request)
 
@@ -70,11 +58,6 @@ def report_narrow_times(
     log_data = RequestNotes.get_notes(request).log_data
     assert log_data is not None
     log_data["extra"] = f"[{initial_core}ms/{initial_free}ms/{network}ms]"
-    realm = get_valid_realm_from_request(request)
-    base_key = statsd_key(realm.string_id, clean_periods=True)
-    statsd.timing(f"narrow.initial_core.{base_key}", initial_core)
-    statsd.timing(f"narrow.initial_free.{base_key}", initial_free)
-    statsd.timing(f"narrow.network.{base_key}", network)
 
     return json_success(request)
 
@@ -88,10 +71,6 @@ def report_unnarrow_times(
     log_data = RequestNotes.get_notes(request).log_data
     assert log_data is not None
     log_data["extra"] = f"[{initial_core}ms/{initial_free}ms]"
-    realm = get_valid_realm_from_request(request)
-    base_key = statsd_key(realm.string_id, clean_periods=True)
-    statsd.timing(f"unnarrow.initial_core.{base_key}", initial_core)
-    statsd.timing(f"unnarrow.initial_free.{base_key}", initial_free)
 
     return json_success(request)

View File

@@ -61,7 +61,6 @@ from .configured_settings import (
     SOCIAL_AUTH_SAML_SECURITY_CONFIG,
     SOCIAL_AUTH_SUBDOMAIN,
     STATIC_URL,
-    STATSD_HOST,
     TORNADO_PORTS,
     USING_PGROONGA,
     ZULIP_ADMINISTRATOR,
@@ -515,17 +514,6 @@ if PRODUCTION:
     INTERNAL_BOT_DOMAIN = "zulip.com"
 
-########################################################################
-# STATSD CONFIGURATION
-########################################################################
-
-# Statsd is not super well supported; if you want to use it you'll need
-# to set STATSD_HOST and STATSD_PREFIX.
-if STATSD_HOST != "":
-    INSTALLED_APPS += ["django_statsd"]
-    STATSD_PORT = 8125
-    STATSD_CLIENT = "django_statsd.clients.normal"
-
 ########################################################################
 # CAMO HTTPS CACHE CONFIGURATION
 ########################################################################

View File

@@ -449,9 +449,6 @@ FIRST_TIME_TERMS_OF_SERVICE_TEMPLATE: Optional[str] = None
 # written.
 TERMS_OF_SERVICE_MESSAGE: Optional[str] = None
 
-# Hostname used for Zulip's statsd logging integration.
-STATSD_HOST = ""
-
 # Configuration for JWT auth (sign in and API key fetch)
 JWT_AUTH_KEYS: Dict[str, JwtAuthKey] = {}