2017-05-05 12:07:10 +02:00
|
|
|
import os
|
|
|
|
|
2019-12-30 02:26:08 +01:00
|
|
|
from abc import ABC, abstractmethod
|
2020-04-02 20:40:10 +02:00
|
|
|
from typing import Dict, List, Optional, Tuple, Type
|
2016-03-27 12:09:54 +02:00
|
|
|
|
2013-05-29 23:58:07 +02:00
|
|
|
from django.conf import settings
|
2019-04-01 20:11:56 +02:00
|
|
|
from django.http import HttpRequest
|
|
|
|
from zerver.lib.exceptions import RateLimited
|
2014-02-05 00:35:32 +01:00
|
|
|
from zerver.lib.redis_utils import get_redis_client
|
2019-03-23 18:33:37 +01:00
|
|
|
from zerver.lib.utils import statsd
|
2013-05-29 23:58:07 +02:00
|
|
|
|
2016-03-27 12:09:54 +02:00
|
|
|
from zerver.models import UserProfile
|
|
|
|
|
2019-03-23 18:33:37 +01:00
|
|
|
import logging
|
2013-05-29 23:58:07 +02:00
|
|
|
import redis
|
|
|
|
import time
|
|
|
|
|
|
|
|
# Implement a rate-limiting scheme inspired by the one described here, but heavily modified
|
2020-03-27 01:32:21 +01:00
|
|
|
# https://www.domaintools.com/resources/blog/rate-limiting-with-redis
|
2013-05-29 23:58:07 +02:00
|
|
|
|
2014-02-05 00:35:32 +01:00
|
|
|
# Shared Redis connection, used by RedisRateLimiterBackend below.
client = get_redis_client()
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
# Per-domain rate-limiting rules, seeded from settings: each domain maps to a
# list of (seconds, max_requests) pairs.  Mutated at runtime by
# add_ratelimit_rule() / remove_ratelimit_rule() (e.g. in tests).
rules: Dict[str, List[Tuple[int, int]]] = settings.RATE_LIMITING_RULES
|
2013-05-29 23:58:07 +02:00
|
|
|
|
2017-11-03 03:12:25 +01:00
|
|
|
# Prefix prepended to every ratelimiter Redis key; tests swap it out via
# bounce_redis_key_prefix_for_testing() to isolate their state.
KEY_PREFIX = ''
|
2017-05-05 12:07:10 +02:00
|
|
|
|
2019-03-23 18:33:37 +01:00
|
|
|
# Module-level logger, named after this module per logging convention.
logger = logging.getLogger(__name__)
|
|
|
|
|
2018-12-12 21:13:00 +01:00
|
|
|
class RateLimiterLockingException(Exception):
    """Raised when the rate limiter fails to acquire its lock."""
|
|
|
|
|
2019-12-30 02:26:08 +01:00
|
|
|
class RateLimitedObject(ABC):
    """Base class for anything that can be rate limited.

    Subclasses supply a storage key() and a list of rules(); this class
    wires them to a RateLimiterBackend that does the actual accounting.
    """

    def __init__(self, backend: Optional['Type[RateLimiterBackend]']=None) -> None:
        # Default to the Redis-backed implementation unless the caller
        # explicitly picked a backend.
        if backend is None:
            self.backend = RedisRateLimiterBackend
        else:
            self.backend: Type[RateLimiterBackend] = backend

    def rate_limit(self) -> Tuple[bool, float]:
        """Record one request; return (ratelimited, secs_to_freedom)."""
        return self.backend.rate_limit_entity(self.key(), self.get_rules(),
                                              self.max_api_calls(),
                                              self.max_api_window())

    def rate_limit_request(self, request: HttpRequest) -> None:
        """Apply this limiter to an HTTP request; raise RateLimited if over."""
        ratelimited, secs_to_freedom = self.rate_limit()

        # Keep a record of every limiter applied to this request.
        if not hasattr(request, '_ratelimits_applied'):
            request._ratelimits_applied = []
        result = RateLimitResult(
            entity=self,
            secs_to_freedom=secs_to_freedom,
            remaining=0,
            over_limit=ratelimited,
        )
        request._ratelimits_applied.append(result)

        # Abort this request if the user is over their rate limits.
        if ratelimited:
            # Pass information about what kind of entity got limited in the exception:
            raise RateLimited(str(secs_to_freedom))

        # Not limited: fill in the real remaining budget and reset time.
        calls_remaining, seconds_until_reset = self.api_calls_left()
        result.remaining = calls_remaining
        result.secs_to_freedom = seconds_until_reset

    def block_access(self, seconds: int) -> None:
        """Manually block this entity for the desired number of seconds."""
        self.backend.block_access(self.key(), seconds)

    def unblock_access(self) -> None:
        """Lift a manual block on this entity."""
        self.backend.unblock_access(self.key())

    def clear_history(self) -> None:
        """Forget all recorded requests for this entity."""
        self.backend.clear_history(self.key())

    def max_api_calls(self) -> int:
        """Return the request count of the last (highest-limit) rule."""
        return self.get_rules()[-1][1]

    def max_api_window(self) -> int:
        """Return the time window, in seconds, of the last (highest-limit) rule."""
        return self.get_rules()[-1][0]

    def api_calls_left(self) -> Tuple[int, float]:
        """Return (calls_remaining, seconds_until_reset) for the widest rule."""
        window = self.max_api_window()
        calls = self.max_api_calls()
        return self.backend.get_api_calls_left(self.key(), window, calls)

    def get_rules(self) -> List[Tuple[int, int]]:
        """Return this entity's rules, never an empty list.

        When no rules are configured, substitute a single effectively
        unlimited rule ("9999 requests per second") so callers never have
        to special-case an empty rule list.
        """
        return self.rules() or [(1, 9999), ]

    @abstractmethod
    def key(self) -> str:
        """Unique storage key identifying this entity."""

    @abstractmethod
    def rules(self) -> List[Tuple[int, int]]:
        """List of (seconds, max_requests) pairs; the last entry is treated
        as the widest window (see max_api_calls/max_api_window)."""
|
2017-07-28 06:40:52 +02:00
|
|
|
|
|
|
|
class RateLimitedUser(RateLimitedObject):
    """Rate limiting of a single user, keyed by user id and domain."""

    def __init__(self, user: UserProfile, domain: str='api_by_user') -> None:
        self.user = user
        self.domain = domain
        # Inside Tornado, the configured domains use the in-memory backend;
        # everything else falls back to the default (Redis) backend.
        backend: Optional[Type[RateLimiterBackend]] = None
        if settings.RUNNING_INSIDE_TORNADO and domain in settings.RATE_LIMITING_DOMAINS_FOR_TORNADO:
            backend = TornadoInMemoryRateLimiterBackend
        super().__init__(backend=backend)

    def key(self) -> str:
        return ':'.join([type(self).__name__, str(self.user.id), self.domain])

    def rules(self) -> List[Tuple[int, int]]:
        # A user's custom rate_limits string overrides the configured rules,
        # but only for the general 'api_by_user' domain.
        if self.domain == 'api_by_user' and self.user.rate_limits != "":
            return [(int(seconds), int(requests))
                    for seconds, requests in (limit.split(':', 2)
                                              for limit in self.user.rate_limits.split(','))]
        return rules[self.domain]
|
2017-07-28 06:40:52 +02:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def bounce_redis_key_prefix_for_testing(test_name: str) -> None:
    """Namespace all ratelimiter Redis keys under the test name and PID,
    so concurrent test runs cannot see each other's state."""
    global KEY_PREFIX
    KEY_PREFIX = f"{test_name}:{os.getpid()}:"
|
2017-05-05 12:07:10 +02:00
|
|
|
|
2019-08-03 20:39:49 +02:00
|
|
|
def add_ratelimit_rule(range_seconds: int, num_requests: int, domain: str='api_by_user') -> None:
    """Add a rate-limiting rule to the ratelimiter.

    Allows at most num_requests requests per range_seconds seconds in the
    given domain, in addition to any rules already registered for it.
    """
    global rules

    # setdefault creates the domain's rule list on first use; the list is
    # kept sorted by time window, since get_rules()[-1] is expected to be
    # the rule with the widest window.
    rules.setdefault(domain, []).append((range_seconds, num_requests))
    rules[domain].sort(key=lambda x: x[0])
|
|
|
|
|
2019-08-03 20:39:49 +02:00
|
|
|
def remove_ratelimit_rule(range_seconds: int, num_requests: int, domain: str='api_by_user') -> None:
    """Remove a previously added rate-limiting rule from the given domain.

    Only exact (range_seconds, num_requests) matches are removed.  The
    previous filter (`x[0] != range_seconds and x[1] != num_requests`)
    was buggy: it also dropped any rule that matched on just one of the
    two components (e.g. same window, different request count).
    """
    global rules
    rules[domain] = [x for x in rules[domain] if x != (range_seconds, num_requests)]
|
2013-05-29 23:58:07 +02:00
|
|
|
|
2020-03-05 13:38:20 +01:00
|
|
|
class RateLimiterBackend(ABC):
    """Interface for rate-limiter storage backends.

    A backend tracks, per entity key, requests made within each configured
    time window, and supports manually blocking a key outright.
    """

    @classmethod
    @abstractmethod
    def block_access(cls, entity_key: str, seconds: int) -> None:
        """Manually blocks an entity for the desired number of seconds."""

    @classmethod
    @abstractmethod
    def unblock_access(cls, entity_key: str) -> None:
        """Lift a manual block previously placed on the entity."""

    @classmethod
    @abstractmethod
    def clear_history(cls, entity_key: str) -> None:
        """Forget all recorded request history for the entity."""

    @classmethod
    @abstractmethod
    def get_api_calls_left(cls, entity_key: str, range_seconds: int,
                           max_calls: int) -> Tuple[int, float]:
        """Return (calls_remaining, seconds_until_reset) for the entity
        under the rule (range_seconds, max_calls)."""

    @classmethod
    @abstractmethod
    def rate_limit_entity(cls, entity_key: str, rules: List[Tuple[int, int]],
                          max_api_calls: int, max_api_window: int) -> Tuple[bool, float]:
        """Record one request against every rule.

        Returns (ratelimited, secs_to_freedom).
        """
|
|
|
|
|
2020-03-19 16:10:31 +01:00
|
|
|
class TornadoInMemoryRateLimiterBackend(RateLimiterBackend):
    """Rate-limiter backend keeping all state in process-local memory.

    State lives in plain class attributes, so results are only meaningful
    when every request for a given key is served by the same process (the
    Tornado case; see RateLimitedUser).  Instead of storing one timestamp
    per request, it stores a single "reset time" per (rule, key), making
    memory usage O(1) per key at the cost of the slightly loosened
    semantics described in need_to_limit().
    """

    # reset_times[rule][key] is the time at which the event
    # request from the rate-limited key will be accepted.
    reset_times: Dict[Tuple[int, int], Dict[str, float]] = {}

    # last_gc_time is the last time when the garbage was
    # collected from reset_times for rule (time_window, max_count).
    last_gc_time: Dict[Tuple[int, int], float] = {}

    # timestamps_blocked_until[key] contains the timestamp
    # up to which the key has been blocked manually.
    timestamps_blocked_until: Dict[str, float] = {}

    @classmethod
    def _garbage_collect_for_rule(cls, now: float, time_window: int, max_count: int) -> None:
        """Drop expired reset times for one rule; remove the rule's dict if empty."""
        keys_to_delete = []
        reset_times_for_rule = cls.reset_times.get((time_window, max_count), None)
        if reset_times_for_rule is None:
            return

        # A reset time in the past carries no information anymore: the key
        # behaves identically to one that was never recorded.
        keys_to_delete = [entity_key for entity_key in reset_times_for_rule
                          if reset_times_for_rule[entity_key] < now]

        for entity_key in keys_to_delete:
            del reset_times_for_rule[entity_key]

        if not reset_times_for_rule:
            del cls.reset_times[(time_window, max_count)]

    @classmethod
    def need_to_limit(cls, entity_key: str, time_window: int,
                      max_count: int) -> Tuple[bool, float]:
        '''
        Returns a tuple of `(rate_limited, time_till_free)`.
        For simplicity, we have loosened the semantics here from
        - each key may make at most `count * (t / window)` requests within any t
        time interval.
        to
        - each key may make at most `count * [(t / window) + 1]` requests within
        any t time interval.
        Thus, we only need to store reset_times for each key which will be less
        memory-intensive. This also has the advantage that you can only ever
        lock yourself out completely for `window / count` seconds instead of
        `window` seconds.
        '''
        now = time.time()

        # Remove all timestamps from `reset_times` that are too old.
        # GC runs at most once per `time_window / max_count` seconds per rule.
        if cls.last_gc_time.get((time_window, max_count), 0) <= now - time_window / max_count:
            cls.last_gc_time[(time_window, max_count)] = now
            cls._garbage_collect_for_rule(now, time_window, max_count)

        # Each allowed request pushes the key's reset time forward by one
        # "slot" of window/count seconds; a reset time more than
        # `time_window` in the future means the bucket is full.
        reset_times_for_rule = cls.reset_times.setdefault((time_window, max_count), {})
        new_reset = max(reset_times_for_rule.get(entity_key, now), now) \
            + time_window / max_count

        if new_reset > now + time_window:
            # Compute for how long the bucket will remain filled.
            time_till_free = new_reset - time_window - now
            return True, time_till_free

        reset_times_for_rule[entity_key] = new_reset
        return False, 0.0

    @classmethod
    def get_api_calls_left(cls, entity_key: str, range_seconds: int,
                           max_calls: int) -> Tuple[int, float]:
        """Return (calls_remaining, seconds_until_reset) under the rule
        (range_seconds, max_calls)."""
        now = time.time()
        if (range_seconds, max_calls) in cls.reset_times and \
                entity_key in cls.reset_times[(range_seconds, max_calls)]:
            reset_time = cls.reset_times[(range_seconds, max_calls)][entity_key]
        else:
            # No state recorded: the full budget is available right now.
            return max_calls, 0

        # Invert need_to_limit()'s bookkeeping: each recorded request
        # consumed one `range_seconds / max_calls` slot of the reset time.
        calls_remaining = (now + range_seconds - reset_time) * max_calls // range_seconds
        return int(calls_remaining), reset_time - now

    @classmethod
    def block_access(cls, entity_key: str, seconds: int) -> None:
        """Manually block the entity for `seconds` from now."""
        now = time.time()
        cls.timestamps_blocked_until[entity_key] = now + seconds

    @classmethod
    def unblock_access(cls, entity_key: str) -> None:
        """Lift a manual block; raises KeyError if no block is recorded."""
        del cls.timestamps_blocked_until[entity_key]

    @classmethod
    def clear_history(cls, entity_key: str) -> None:
        """Forget all rate-limit state (reset times and manual blocks) for the key."""
        for rule, reset_times_for_rule in cls.reset_times.items():
            reset_times_for_rule.pop(entity_key, None)
        cls.timestamps_blocked_until.pop(entity_key, None)

    @classmethod
    def rate_limit_entity(cls, entity_key: str, rules: List[Tuple[int, int]],
                          max_api_calls: int, max_api_window: int) -> Tuple[bool, float]:
        """Record one request; return (ratelimited, secs_to_freedom).

        max_api_calls/max_api_window are accepted for interface
        compatibility but unused by this backend.
        """
        now = time.time()
        if entity_key in cls.timestamps_blocked_until:
            # Check whether the key is manually blocked.
            if now < cls.timestamps_blocked_until[entity_key]:
                blocking_ttl = cls.timestamps_blocked_until[entity_key] - now
                return True, blocking_ttl
            else:
                # The manual block has expired; clean it up.
                del cls.timestamps_blocked_until[entity_key]

        # `rules` is never empty (see RateLimitedObject.get_rules), so the
        # loop always binds ratelimited/time_till_free.
        assert rules
        for time_window, max_count in rules:
            ratelimited, time_till_free = cls.need_to_limit(entity_key, time_window, max_count)

            if ratelimited:
                statsd.incr(f"ratelimiter.limited.{entity_key}")
                break

        return ratelimited, time_till_free
|
|
|
|
|
2020-03-05 13:38:20 +01:00
|
|
|
class RedisRateLimiterBackend(RateLimiterBackend):
    """Rate-limiter backend that stores request history in redis.

    For each entity we keep three keys:
      * a list of request timestamps (newest first),
      * a sorted set of the same timestamps (scored by timestamp), and
      * an optional "block" key used to manually block the entity.
    """

    @classmethod
    def get_keys(cls, entity_key: str) -> List[str]:
        """Return the [list, zset, block] redis keys for this entity."""
        return [f"{KEY_PREFIX}ratelimit:{entity_key}:{keytype}"
                for keytype in ['list', 'zset', 'block']]

    @classmethod
    def block_access(cls, entity_key: str, seconds: int) -> None:
        """Manually block an entity for the desired number of seconds."""
        _, _, blocking_key = cls.get_keys(entity_key)
        with client.pipeline() as pipe:
            pipe.set(blocking_key, 1)
            # The block expires on its own; no cleanup pass is needed.
            pipe.expire(blocking_key, seconds)
            pipe.execute()

    @classmethod
    def unblock_access(cls, entity_key: str) -> None:
        """Remove a manual block on the entity, if one exists."""
        _, _, blocking_key = cls.get_keys(entity_key)
        client.delete(blocking_key)

    @classmethod
    def clear_history(cls, entity_key: str) -> None:
        """Forget all recorded requests (and any block) for the entity."""
        for key in cls.get_keys(entity_key):
            client.delete(key)

    @classmethod
    def get_api_calls_left(cls, entity_key: str, range_seconds: int,
                           max_calls: int) -> Tuple[int, float]:
        """Return (calls_remaining, seconds_until_window_resets)."""
        list_key, set_key, _ = cls.get_keys(entity_key)
        # Count the number of values in our sorted set
        # that are between now and the cutoff
        now = time.time()
        boundary = now - range_seconds

        with client.pipeline() as pipe:
            # Count how many API calls in our range have already been made
            pipe.zcount(set_key, boundary, now)
            # Get the newest call so we can calculate when the ratelimit
            # will reset to 0
            pipe.lindex(list_key, 0)

            results = pipe.execute()

        count: int = results[0]
        newest_call: Optional[bytes] = results[1]

        calls_left = max_calls - count
        if newest_call is not None:
            time_reset = now + (range_seconds - (now - float(newest_call)))
        else:
            time_reset = now

        return calls_left, time_reset - now

    @classmethod
    def is_ratelimited(cls, entity_key: str, rules: List[Tuple[int, int]]) -> Tuple[bool, float]:
        """Returns a tuple of (rate_limited, time_till_free)."""
        assert rules
        list_key, set_key, blocking_key = cls.get_keys(entity_key)

        # Go through the rules from shortest to longest,
        # seeing if this user has violated any of them. First
        # get the timestamps for each nth items
        with client.pipeline() as pipe:
            for _, request_count in rules:
                pipe.lindex(list_key, request_count - 1)  # 0-indexed list

            # Get blocking info
            pipe.get(blocking_key)
            pipe.ttl(blocking_key)

            rule_timestamps: List[Optional[bytes]] = pipe.execute()

        # Check if there is a manual block on this API key; the last two
        # pipeline results are GET and TTL of the blocking key.
        blocking_ttl_b = rule_timestamps.pop()
        key_blocked = rule_timestamps.pop()

        if key_blocked is not None:
            # We are manually blocked. Report for how much longer we will be
            if blocking_ttl_b is None:  # nocoverage # defensive code, this should never happen
                blocking_ttl = 0.5
            else:
                blocking_ttl = int(blocking_ttl_b)
            return True, blocking_ttl

        now = time.time()
        for timestamp, (range_seconds, num_requests) in zip(rule_timestamps, rules):
            # Check if the nth timestamp is newer than the associated rule. If so,
            # it means we've hit our limit for this rule
            if timestamp is None:
                continue

            boundary = float(timestamp) + range_seconds
            if boundary >= now:
                free = boundary - now
                return True, free

        return False, 0.0

    @classmethod
    def incr_ratelimit(cls, entity_key: str, max_api_calls: int, max_api_window: int) -> None:
        """Increases the rate-limit for the specified entity.

        Raises RateLimiterLockingException if the WATCH-based transaction
        repeatedly conflicts with concurrent writers.
        """
        list_key, set_key, _ = cls.get_keys(entity_key)
        now = time.time()

        # Start redis transaction
        with client.pipeline() as pipe:
            count = 0
            while True:
                try:
                    # To avoid a race condition between getting the element we might trim from our list
                    # and removing it from our associated set, we abort this whole transaction if
                    # another agent manages to change our list out from under us
                    # When watching a value, the pipeline is set to Immediate mode
                    pipe.watch(list_key)

                    # Get the last elem that we'll trim (so we can remove it from our sorted set)
                    last_val = pipe.lindex(list_key, max_api_calls - 1)

                    # Restart buffered execution
                    pipe.multi()

                    # Add this timestamp to our list
                    pipe.lpush(list_key, now)

                    # Trim our list to the oldest rule we have
                    pipe.ltrim(list_key, 0, max_api_calls - 1)

                    # Add our new value to the sorted set that we keep
                    # We need to put the score and val both as timestamp,
                    # as we sort by score but remove by value
                    pipe.zadd(set_key, {str(now): now})

                    # Remove the trimmed value from our sorted set, if there was one
                    if last_val is not None:
                        pipe.zrem(set_key, last_val)

                    # Set the TTL for our keys as well
                    pipe.expire(list_key, max_api_window)
                    pipe.expire(set_key, max_api_window)

                    pipe.execute()

                    # If no exception was raised in the execution, there were no transaction conflicts
                    break
                except redis.WatchError:  # nocoverage # Ideally we'd have a test for this.
                    # Someone else modified the list between WATCH and EXEC;
                    # retry, giving up after ~10 conflicts.
                    if count > 10:
                        raise RateLimiterLockingException()
                    count += 1

    @classmethod
    def rate_limit_entity(cls, entity_key: str, rules: List[Tuple[int, int]],
                          max_api_calls: int, max_api_window: int) -> Tuple[bool, float]:
        """Check the entity against its rules and record the hit if allowed.

        Returns a tuple (ratelimited, time_till_free).
        """
        # NOTE: renamed from `time` so the local doesn't shadow the `time` module.
        ratelimited, time_till_free = cls.is_ratelimited(entity_key, rules)

        if ratelimited:
            statsd.incr(f"ratelimiter.limited.{entity_key}")

        else:
            try:
                cls.incr_ratelimit(entity_key, max_api_calls, max_api_window)
            except RateLimiterLockingException:
                logger.warning("Deadlock trying to incr_ratelimit for %s", entity_key)
                # rate-limit users who are hitting the API so hard we can't update our stats.
                ratelimited = True

        return ratelimited, time_till_free
|
2019-03-23 18:33:37 +01:00
|
|
|
|
2019-12-28 20:23:18 +01:00
|
|
|
class RateLimitResult:
    """Value object describing the outcome of a single rate-limit check."""

    def __init__(self, entity: RateLimitedObject, secs_to_freedom: float, over_limit: bool,
                 remaining: int) -> None:
        # An over-limit result implies no calls remain in the window.
        assert not (over_limit and remaining)

        self.over_limit = over_limit
        self.remaining = remaining
        self.entity = entity
        self.secs_to_freedom = secs_to_freedom
|