2018-07-31 23:07:42 +02:00
|
|
|
# See https://zulip.readthedocs.io/en/latest/subsystems/caching.html for docs
|
2020-06-11 00:54:34 +02:00
|
|
|
import hashlib
|
|
|
|
import logging
|
|
|
|
import os
|
|
|
|
import re
|
2020-09-05 04:02:13 +02:00
|
|
|
import secrets
|
2020-06-11 00:54:34 +02:00
|
|
|
import sys
|
|
|
|
import time
|
|
|
|
import traceback
|
2022-06-26 20:21:11 +02:00
|
|
|
from functools import _lru_cache_wrapper, lru_cache, wraps
|
2020-06-11 00:54:34 +02:00
|
|
|
from typing import (
|
|
|
|
TYPE_CHECKING,
|
|
|
|
Any,
|
|
|
|
Callable,
|
|
|
|
Dict,
|
2022-06-26 20:21:11 +02:00
|
|
|
Generic,
|
2020-06-11 00:54:34 +02:00
|
|
|
Iterable,
|
|
|
|
List,
|
|
|
|
Optional,
|
|
|
|
Sequence,
|
|
|
|
Tuple,
|
|
|
|
TypeVar,
|
|
|
|
)
|
2012-11-02 00:23:26 +01:00
|
|
|
|
2020-06-11 00:54:34 +02:00
|
|
|
from django.conf import settings
|
2016-10-10 15:09:41 +02:00
|
|
|
from django.core.cache import caches
|
2016-06-11 12:18:44 +02:00
|
|
|
from django.core.cache.backends.base import BaseCache
|
2023-03-16 20:00:51 +01:00
|
|
|
from django.db.models import Q
|
2019-01-27 07:18:19 +01:00
|
|
|
from django.http import HttpRequest
|
2023-03-16 20:00:51 +01:00
|
|
|
from django_stubs_ext import QuerySetAny
|
2022-04-13 05:42:12 +02:00
|
|
|
from typing_extensions import ParamSpec
|
2012-09-19 18:41:20 +02:00
|
|
|
|
2019-07-30 20:58:48 +02:00
|
|
|
if TYPE_CHECKING:
|
2016-06-10 18:06:39 +02:00
|
|
|
# These modules have to be imported for type annotations but
|
|
|
|
# they cannot be imported at runtime due to cyclic dependency.
|
2021-07-16 00:45:17 +02:00
|
|
|
from zerver.models import Attachment, Message, MutedUser, Realm, Stream, SubMessage, UserProfile
|
2016-06-10 18:06:39 +02:00
|
|
|
|
2019-12-16 05:53:54 +01:00
|
|
|
# Memcached's hard limit on key length; validate_cache_key() enforces it.
MEMCACHED_MAX_KEY_LENGTH = 250

# Type variables used to give cache_with_key a fully typed signature.
ParamT = ParamSpec("ParamT")
ReturnT = TypeVar("ReturnT")

# Root logger; log_invalid_cache_keys() writes warnings here.
logger = logging.getLogger()

# Module-level counters instrumenting remote cache usage; maintained by
# remote_cache_stats_start()/remote_cache_stats_finish() below.
remote_cache_time_start = 0.0
remote_cache_total_time = 0.0
remote_cache_total_requests = 0
|
2013-04-19 00:00:33 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def get_remote_cache_time() -> float:
    # Total wall-clock seconds spent on remote cache operations so far,
    # as accumulated by remote_cache_stats_finish().
    return remote_cache_total_time
|
2013-04-19 00:00:33 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def get_remote_cache_requests() -> int:
    # Total number of remote cache operations so far, as counted by
    # remote_cache_stats_finish().
    return remote_cache_total_requests
|
2013-04-19 00:00:33 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def remote_cache_stats_start() -> None:
    # Record the wall-clock start time of a remote cache operation;
    # remote_cache_stats_finish() reads this to accumulate the total.
    global remote_cache_time_start
    remote_cache_time_start = time.time()
|
2013-05-10 16:57:06 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def remote_cache_stats_finish() -> None:
    """Accumulate timing statistics for the remote cache operation that
    was started via remote_cache_stats_start()."""
    global remote_cache_total_time, remote_cache_total_requests
    elapsed = time.time() - remote_cache_time_start
    remote_cache_total_requests += 1
    remote_cache_total_time += elapsed
|
|
|
|
|
2013-04-16 22:58:21 +02:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def get_or_create_key_prefix() -> str:
    """Return the cache key prefix for this deployment, generating and
    persisting a fresh random one on first use.

    Exits the process if no valid prefix can be created or read.
    """
    if settings.PUPPETEER_TESTS:
        # Fixed prefix for the benefit of the Puppeteer tests.
        #
        # Having a fixed key is OK since we don't support running
        # multiple copies of the Puppeteer tests at the same time anyway.
        return "puppeteer_tests:"
    elif settings.TEST_SUITE:
        # The Python tests overwrite KEY_PREFIX on each test, but use
        # this codepath as well, just to save running the more complex
        # code below for reading the normal key prefix.
        return "django_tests_unused:"

    # directory `var` should exist in production
    os.makedirs(os.path.join(settings.DEPLOY_ROOT, "var"), exist_ok=True)

    prefix_path = os.path.join(settings.DEPLOY_ROOT, "var", "remote_cache_prefix")
    try:
        # Open with "x" so that exactly one process wins the race to
        # create the file and write the prefix.
        with open(prefix_path, "x") as f:
            prefix = secrets.token_hex(16) + ":"
            f.write(prefix + "\n")
    except FileExistsError:
        # Another process created the file; poll until its contents look
        # complete (32 hex digits plus the trailing ":").
        prefix = ""
        for _attempt in range(9):
            with open(prefix_path) as f:
                candidate = f.readline()[:-1]
            if len(candidate) == 33:
                prefix = candidate
                break
            time.sleep(0.5)

    if not prefix:
        print("Could not read remote cache key prefix file")
        sys.exit(1)

    return prefix
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
# Deployment-wide cache key prefix; every cache_* helper below prepends it.
KEY_PREFIX: str = get_or_create_key_prefix()
|
2013-05-30 21:02:12 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def bounce_key_prefix_for_testing(test_name: str) -> None:
    """Give the current test its own cache namespace by rotating KEY_PREFIX
    to a value derived from the test name and process id."""
    global KEY_PREFIX
    raw_prefix = test_name + ":" + str(os.getpid()) + ":"
    # We are taking the hash of the raw prefix to decrease the size of the key.
    # Memcached keys should have a length of less than 250.
    KEY_PREFIX = hashlib.sha1(raw_prefix.encode()).hexdigest() + ":"
|
2013-07-02 19:10:50 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def get_cache_backend(cache_name: Optional[str]) -> BaseCache:
    """Look up a Django cache backend by name; None means the "default" cache."""
    backend_name = "default" if cache_name is None else cache_name
    return caches[backend_name]
|
2013-06-19 19:44:03 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-03-16 17:50:41 +01:00
|
|
|
def cache_with_key(
    keyfunc: Callable[ParamT, str],
    cache_name: Optional[str] = None,
    timeout: Optional[int] = None,
) -> Callable[[Callable[ParamT, ReturnT]], Callable[ParamT, ReturnT]]:
    """Decorator which applies Django caching to a function.

    Decorator argument is a function which computes a cache key
    from the original function's arguments. You are responsible
    for avoiding collisions with other uses of this decorator or
    other uses of caching."""

    def decorator(func: Callable[ParamT, ReturnT]) -> Callable[ParamT, ReturnT]:
        @wraps(func)
        def func_with_caching(*args: ParamT.args, **kwargs: ParamT.kwargs) -> ReturnT:
            key = keyfunc(*args, **kwargs)

            try:
                cached_value = cache_get(key, cache_name=cache_name)
            except InvalidCacheKeyError:
                # Log the malformed key and fall through to an uncached call.
                log_invalid_cache_keys(traceback.format_exc(), [key])
                return func(*args, **kwargs)

            # Values are singleton tuples so that we can distinguish
            # a result of None from a missing key.
            if cached_value is not None:
                return cached_value[0]

            result = func(*args, **kwargs)
            if isinstance(result, QuerySetAny):
                # Caching a lazy QuerySet would pickle the whole query result.
                logging.error(
                    "cache_with_key attempted to store a full QuerySet object -- declining to cache",
                    stack_info=True,
                )
            else:
                cache_set(key, result, cache_name=cache_name, timeout=timeout)

            return result

        return func_with_caching

    return decorator
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-11-17 09:30:48 +01:00
|
|
|
class InvalidCacheKeyError(Exception):
    """Raised when a cache key contains disallowed characters or exceeds
    memcached's length limit (see validate_cache_key)."""
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-12-16 05:53:54 +01:00
|
|
|
def log_invalid_cache_keys(stack_trace: str, key: List[str]) -> None:
    """Log a warning listing cache keys that failed validation, together
    with the stack trace of the failure for debugging."""
    logger.warning("Invalid cache key used: %s\nStack trace: %s\n", key, stack_trace)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-12-16 05:53:54 +01:00
|
|
|
def validate_cache_key(key: str) -> None:
    """Check that `key` (with KEY_PREFIX applied, if not already present)
    is safe to send to memcached.

    Raises InvalidCacheKeyError if the key contains characters outside the
    printable non-space ASCII range or exceeds MEMCACHED_MAX_KEY_LENGTH.
    """
    if not key.startswith(KEY_PREFIX):
        key = KEY_PREFIX + key

    # Theoretically memcached can handle non-ascii characters
    # and only "control" characters are strictly disallowed, see:
    # https://github.com/memcached/memcached/blob/master/doc/protocol.txt
    # However, limiting the characters we allow in keys simplifies things,
    # and anyway we use a hash function when forming some keys to ensure
    # the resulting keys fit the regex below.
    # The regex checks "all characters between ! and ~ in the ascii table",
    # which happens to be the set of all "nice" ascii characters.
    if re.fullmatch(r"[!-~]+", key) is None:
        raise InvalidCacheKeyError("Invalid characters in the cache key: " + key)
    if len(key) > MEMCACHED_MAX_KEY_LENGTH:
        raise InvalidCacheKeyError(f"Cache key too long: {key} Length: {len(key)}")
|
2019-12-16 05:53:54 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def cache_set(
    key: str, val: Any, cache_name: Optional[str] = None, timeout: Optional[int] = None
) -> None:
    """Store `val` under KEY_PREFIX + key, wrapped in a singleton tuple so a
    stored None is distinguishable from a cache miss."""
    prefixed_key = KEY_PREFIX + key
    validate_cache_key(prefixed_key)

    remote_cache_stats_start()
    backend = get_cache_backend(cache_name)
    backend.set(prefixed_key, (val,), timeout=timeout)
    remote_cache_stats_finish()
|
2013-05-30 20:01:36 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def cache_get(key: str, cache_name: Optional[str] = None) -> Any:
    """Fetch the singleton-tuple-wrapped value stored under KEY_PREFIX + key,
    or None on a cache miss."""
    prefixed_key = KEY_PREFIX + key
    validate_cache_key(prefixed_key)

    remote_cache_stats_start()
    result = get_cache_backend(cache_name).get(prefixed_key)
    remote_cache_stats_finish()
    return result
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def cache_get_many(keys: List[str], cache_name: Optional[str] = None) -> Dict[str, Any]:
    """Bulk-fetch `keys` from the cache, returning a dict from the
    unprefixed keys to their stored values; missing keys are omitted."""
    prefixed_keys = [KEY_PREFIX + key for key in keys]
    for prefixed_key in prefixed_keys:
        validate_cache_key(prefixed_key)
    remote_cache_stats_start()
    fetched = get_cache_backend(cache_name).get_many(prefixed_keys)
    remote_cache_stats_finish()
    # Strip KEY_PREFIX back off so callers see the keys they passed in.
    return {key[len(KEY_PREFIX) :]: value for key, value in fetched.items()}
|
|
|
|
|
2013-04-22 16:29:57 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def safe_cache_get_many(keys: List[str], cache_name: Optional[str] = None) -> Dict[str, Any]:
    """Variant of cache_get_many that drops any keys that fail
    validation, rather than throwing an exception visible to the
    caller."""
    try:
        # Almost always the keys will all be correct, so we just try
        # to do normal cache_get_many to avoid the overhead of
        # validating all the keys here.
        return cache_get_many(keys, cache_name)
    except InvalidCacheKeyError:
        trace = traceback.format_exc()
        good_keys, bad_keys = filter_good_and_bad_keys(keys)
        log_invalid_cache_keys(trace, bad_keys)
        # Retry with only the keys that validated.
        return cache_get_many(good_keys, cache_name)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def cache_set_many(
    items: Dict[str, Any], cache_name: Optional[str] = None, timeout: Optional[int] = None
) -> None:
    """Bulk-store `items` in the cache, prefixing each key with KEY_PREFIX.

    Unlike cache_set, values are stored exactly as given (no singleton-tuple
    wrapping); callers that need the wrapping supply it themselves."""
    prefixed_items = {}
    for key, value in items.items():
        prefixed_key = KEY_PREFIX + key
        validate_cache_key(prefixed_key)
        prefixed_items[prefixed_key] = value
    remote_cache_stats_start()
    get_cache_backend(cache_name).set_many(prefixed_items, timeout=timeout)
    remote_cache_stats_finish()
|
2013-04-25 20:41:54 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def safe_cache_set_many(
    items: Dict[str, Any], cache_name: Optional[str] = None, timeout: Optional[int] = None
) -> None:
    """Variant of cache_set_many that drops saving any keys that fail
    validation, rather than throwing an exception visible to the
    caller."""
    try:
        # The common case is that every key is valid, so we skip
        # pre-validating them and just attempt the normal bulk set.
        return cache_set_many(items, cache_name, timeout)
    except InvalidCacheKeyError:
        trace = traceback.format_exc()

        good_keys, bad_keys = filter_good_and_bad_keys(list(items.keys()))
        log_invalid_cache_keys(trace, bad_keys)

        # Retry with only the items whose keys validated.
        good_items = {key: items[key] for key in good_keys}
        return cache_set_many(good_items, cache_name, timeout)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def cache_delete(key: str, cache_name: Optional[str] = None) -> None:
    """Remove the entry stored under KEY_PREFIX + key from the cache."""
    prefixed_key = KEY_PREFIX + key
    validate_cache_key(prefixed_key)

    remote_cache_stats_start()
    get_cache_backend(cache_name).delete(prefixed_key)
    remote_cache_stats_finish()
|
2013-08-28 00:19:54 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def cache_delete_many(items: Iterable[str], cache_name: Optional[str] = None) -> None:
    """Bulk-remove the given (unprefixed) keys from the cache."""
    prefixed_keys = [KEY_PREFIX + item for item in items]
    for prefixed_key in prefixed_keys:
        validate_cache_key(prefixed_key)
    remote_cache_stats_start()
    get_cache_backend(cache_name).delete_many(prefixed_keys)
    remote_cache_stats_finish()
|
2013-08-22 16:45:38 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-12-16 05:53:54 +01:00
|
|
|
def filter_good_and_bad_keys(keys: List[str]) -> Tuple[List[str], List[str]]:
    """Partition `keys` into (valid, invalid) lists according to
    validate_cache_key, preserving the input order within each list."""
    good_keys: List[str] = []
    bad_keys: List[str] = []
    for key in keys:
        try:
            validate_cache_key(key)
        except InvalidCacheKeyError:
            bad_keys.append(key)
        else:
            good_keys.append(key)

    return good_keys, bad_keys
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-08-08 21:34:06 +02:00
|
|
|
# Generic_bulk_cached fetch and its helpers. We start with declaring
|
|
|
|
# a few type variables that help define its interface.
|
|
|
|
|
|
|
|
# Type for the cache's keys; will typically be int or str.
|
2021-02-12 08:20:45 +01:00
|
|
|
ObjKT = TypeVar("ObjKT")
|
2019-08-08 21:34:06 +02:00
|
|
|
|
|
|
|
# Type for items to be fetched from the database (e.g. a Django model object)
|
2021-02-12 08:20:45 +01:00
|
|
|
ItemT = TypeVar("ItemT")
|
2019-08-08 21:34:06 +02:00
|
|
|
|
|
|
|
# Type for items to be stored in the cache (e.g. a dictionary serialization).
|
|
|
|
# Will equal ItemT unless a cache_transformer is specified.
|
2021-02-12 08:20:45 +01:00
|
|
|
CacheItemT = TypeVar("CacheItemT")
|
2019-08-08 21:34:06 +02:00
|
|
|
|
|
|
|
# Type for compressed items for storage in the cache. For
|
|
|
|
# serializable objects, will be the object; if encoded, bytes.
|
2021-02-12 08:20:45 +01:00
|
|
|
CompressedItemT = TypeVar("CompressedItemT")
|
2017-10-28 18:57:57 +02:00
|
|
|
|
2023-02-02 04:35:24 +01:00
|
|
|
|
2021-05-10 07:02:14 +02:00
|
|
|
def generic_bulk_cached_fetch(
    cache_key_function: Callable[[ObjKT], str],
    query_function: Callable[[List[ObjKT]], Iterable[ItemT]],
    object_ids: Sequence[ObjKT],
    *,
    extractor: Callable[[CompressedItemT], CacheItemT],
    setter: Callable[[CacheItemT], CompressedItemT],
    id_fetcher: Callable[[ItemT], ObjKT],
    cache_transformer: Callable[[ItemT], CacheItemT],
) -> Dict[ObjKT, CacheItemT]:
    """Bulk-fetch objects by id, preferring the cache and falling back to
    the database for anything missing; freshly fetched objects are written
    back to the cache.

    Arguments:
    * object_ids: The list of object ids to look up
    * cache_key_function: object_id => cache key
    * query_function: [object_ids] => [objects from database]
    * setter: Function to call before storing items to cache (e.g. compression)
    * extractor: Function to call on items returned from cache
      (e.g. decompression). Should be the inverse of the setter function.
    * id_fetcher: Function mapping an object from database => object_id
      (in case we're using a key more complex than obj.id)
    * cache_transformer: Function mapping an object from database =>
      value for cache (in case the values that we're caching are some
      function of the objects, not the objects themselves)
    """
    if len(object_ids) == 0:
        # Nothing to fetch.
        return {}

    cache_keys: Dict[ObjKT, str] = {
        object_id: cache_key_function(object_id) for object_id in object_ids
    }

    cached_objects_compressed: Dict[str, Tuple[CompressedItemT]] = safe_cache_get_many(
        [cache_keys[object_id] for object_id in object_ids],
    )

    # Unwrap the singleton tuples and decompress what the cache returned.
    cached_objects = {key: extractor(val[0]) for key, val in cached_objects_compressed.items()}
    needed_ids = [
        object_id for object_id in object_ids if cache_keys[object_id] not in cached_objects
    ]

    # Only call query_function if there are some ids to fetch from the database:
    db_objects = query_function(needed_ids) if len(needed_ids) > 0 else []

    items_for_remote_cache: Dict[str, Tuple[CompressedItemT]] = {}
    for db_obj in db_objects:
        obj_key = cache_keys[id_fetcher(db_obj)]
        cache_item = cache_transformer(db_obj)
        # Store the compressed form in the cache, the usable form locally.
        items_for_remote_cache[obj_key] = (setter(cache_item),)
        cached_objects[obj_key] = cache_item
    if len(items_for_remote_cache) > 0:
        safe_cache_set_many(items_for_remote_cache)
    return {
        object_id: cached_objects[cache_keys[object_id]]
        for object_id in object_ids
        if cache_keys[object_id] in cached_objects
    }
|
|
|
|
|
2013-06-27 21:55:42 +02:00
|
|
|
|
2020-07-01 03:29:31 +02:00
|
|
|
def bulk_cached_fetch(
    cache_key_function: Callable[[ObjKT], str],
    query_function: Callable[[List[ObjKT]], Iterable[ItemT]],
    object_ids: Sequence[ObjKT],
    *,
    id_fetcher: Callable[[ItemT], ObjKT],
) -> Dict[ObjKT, ItemT]:
    """Simplified form of generic_bulk_cached_fetch for the common case
    where the queried objects are cached as-is, with no extraction,
    serialization, or transformation step."""

    def passthrough(obj: Any) -> Any:
        # The generic machinery requires these hooks; here they are no-ops.
        return obj

    return generic_bulk_cached_fetch(
        cache_key_function,
        query_function,
        object_ids,
        id_fetcher=id_fetcher,
        extractor=passthrough,
        setter=passthrough,
        cache_transformer=passthrough,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-10-14 14:41:15 +02:00
|
|
|
def preview_url_cache_key(url: str) -> str:
    """Cache key for URL preview data; the URL is hashed so the key stays
    short and valid regardless of the URL's length or characters."""
    digest = hashlib.sha1(url.encode()).hexdigest()
    return "preview_url:" + digest
|
2018-10-14 14:41:15 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def display_recipient_cache_key(recipient_id: int) -> str:
    """Cache key for the display_recipient data of a Recipient row."""
    return "display_recipient_dict:{}".format(recipient_id)
|
2013-08-22 17:44:52 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-16 19:08:55 +02:00
|
|
|
def single_user_display_recipient_cache_key(user_id: int) -> str:
    """Cache key for the display_recipient entry of a single user."""
    return "single_user_display_recipient:" + str(user_id)
|
2019-08-07 00:18:13 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def user_profile_cache_key_id(email: str, realm_id: int) -> str:
    """Cache key for a UserProfile looked up by email within a realm.

    The (stripped) email is hashed so arbitrary characters or long
    addresses cannot produce an invalid or over-long memcached key.
    """
    email_hash = hashlib.sha1(email.strip().encode()).hexdigest()
    return "user_profile:{}:{}".format(email_hash, realm_id)
|
2017-11-27 23:33:13 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
def user_profile_cache_key(email: str, realm: "Realm") -> str:
    """Convenience wrapper around user_profile_cache_key_id for callers
    that hold a Realm object rather than a realm id."""
    return user_profile_cache_key_id(email, realm.id)
|
2017-05-22 19:45:54 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-03-04 21:08:53 +01:00
|
|
|
def user_profile_delivery_email_cache_key(delivery_email: str, realm_id: int) -> str:
    """Cache key for a UserProfile looked up by delivery email within a
    realm; the (stripped) address is hashed to keep the key well-formed."""
    digest = hashlib.sha1(delivery_email.strip().encode()).hexdigest()
    return "user_profile_by_delivery_email:" + digest + ":" + str(realm_id)
|
2021-03-04 14:36:31 +01:00
|
|
|
|
|
|
|
|
2022-04-13 05:42:12 +02:00
|
|
|
def bot_profile_cache_key(email: str, realm_id: int) -> str:
    """Cache key for a system (cross-realm) bot's profile.

    NOTE(review): realm_id is accepted but not included in the key -- the
    key is derived only from the hashed email. This looks intentional for
    system bots (unique by email), but confirm before reusing this helper
    for anything realm-scoped.
    """
    return "bot_profile:" + hashlib.sha1(email.strip().encode()).hexdigest()
|
2017-05-22 23:37:15 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def user_profile_by_id_cache_key(user_profile_id: int) -> str:
    """Cache key for a UserProfile looked up by primary key."""
    return "user_profile_by_id:{}".format(user_profile_id)
|
2013-03-18 17:10:45 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def user_profile_by_api_key_cache_key(api_key: str) -> str:
    """Cache key for a UserProfile looked up by API key."""
    return "user_profile_by_api_key:" + api_key
|
2017-08-25 07:43:38 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
cache: Use a single cache entry for cross-realm bots.
The cross-realm bots rarely change, and there are only
a few of them, so we just query them all at once and
put them in the cache.
Also, we put the dictionaries in the cache, instead of
the user objects, since there is nothing time-sensitive
about the dictionaries, and they are small. This saves
us a little time computing the avatar url and things
like that, not to mention marshalling costs.
This commit also fixes a theoretical bug where we would
have stale cache entries if somebody somehow modified
the cross-realm bots without bumping KEY_PREFIX.
Internally we no longer pre-fetch the realm objects for
the bots, but we don't get overly precise about picking
individual fields from UserProfile, since we rarely hit
the database and since we don't store raw ORM objects
in the cache.
The test diffs make it look like we are hitting the
cache an extra time, but the tests weren't counting
bulk fetches. Now we only use a single key for all
bots rather a key per bot.
2023-07-19 14:06:56 +02:00
|
|
|
def get_cross_realm_dicts_key() -> str:
    """Cache key for the single entry holding all cross-realm system bots.

    The key embeds a hash of the sorted bot email list, so editing
    settings.CROSS_REALM_BOT_EMAILS naturally invalidates the entry.
    """
    sorted_emails = sorted(settings.CROSS_REALM_BOT_EMAILS)
    digest = hashlib.sha1(",".join(sorted_emails).encode()).hexdigest()
    return "get_cross_realm_dicts:" + digest
|
|
|
|
|
|
|
|
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
# UserProfile fields included in the per-realm user-dict cache entry (see
# realm_user_dicts_cache_key); flush_user_profile invalidates that cache
# whenever one of these fields changes.
realm_user_dict_fields: List[str] = [
    "id",
    "full_name",
    "email",
    "avatar_source",
    "avatar_version",
    "is_active",
    "role",
    "is_billing_admin",
    "is_bot",
    "timezone",
    "date_joined",
    "bot_owner_id",
    "delivery_email",
    "bot_type",
    "long_term_idle",
    "email_address_visibility",
]
|
2017-02-21 17:55:32 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def realm_user_dicts_cache_key(realm_id: int) -> str:
    """Cache key for the list of user dicts of a realm."""
    return "realm_user_dicts:{}".format(realm_id)
|
2013-10-23 23:16:39 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-06-06 04:47:36 +02:00
|
|
|
def get_muting_users_cache_key(muted_user_id: int) -> str:
    """Cache key for the list of users who have muted the given user."""
    return "muting_users_list:{}".format(muted_user_id)
|
2021-03-27 13:31:26 +01:00
|
|
|
|
|
|
|
|
2023-06-08 21:46:38 +02:00
|
|
|
def get_realm_used_upload_space_cache_key(realm_id: int) -> str:
    """Cache key for a realm's total used attachment-upload space."""
    return "realm_used_upload_space:{}".format(realm_id)
|
2019-01-14 07:46:31 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def active_user_ids_cache_key(realm_id: int) -> str:
    """Cache key for the set of active user ids in a realm."""
    return "active_user_ids:{}".format(realm_id)
|
2017-09-16 21:44:03 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-06-03 19:11:52 +02:00
|
|
|
def active_non_guest_user_ids_cache_key(realm_id: int) -> str:
    """Cache key for the set of active, non-guest user ids in a realm."""
    return "active_non_guest_user_ids:{}".format(realm_id)
|
2018-06-03 19:11:52 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
# Fields (including related-field lookups like
# default_sending_stream__name) used to build the bot_dicts_in_realm cache
# entry; flush_user_profile consults this list to decide when a bot's save
# should invalidate that cache.
bot_dict_fields: List[str] = [
    "api_key",
    "avatar_source",
    "avatar_version",
    "bot_owner_id",
    "bot_type",
    "default_all_public_streams",
    "default_events_register_stream__name",
    "default_sending_stream__name",
    "email",
    "full_name",
    "id",
    "is_active",
    "realm_id",
]
|
2017-02-06 20:45:26 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-06-08 21:42:16 +02:00
|
|
|
def bot_dicts_in_realm_cache_key(realm_id: int) -> str:
    """Cache key for the list of bot dicts of a realm."""
    return "bot_dicts_in_realm:{}".format(realm_id)
|
2014-02-26 00:12:14 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-03-04 21:08:53 +01:00
|
|
|
def delete_user_profile_caches(user_profiles: Iterable["UserProfile"], realm_id: int) -> None:
    """Delete every cache entry keyed on the given user profiles: by-id,
    by-API-key, by-email, by-delivery-email, and (for system bots) the
    bot-profile and cross-realm-dicts entries."""
    # Local imports break an import cycle with these modules.
    from zerver.lib.users import get_all_api_keys
    from zerver.models.users import is_cross_realm_bot_email

    stale_keys: List[str] = []
    for profile in user_profiles:
        stale_keys.append(user_profile_by_id_cache_key(profile.id))
        stale_keys.extend(
            user_profile_by_api_key_cache_key(api_key) for api_key in get_all_api_keys(profile)
        )
        stale_keys.append(user_profile_cache_key_id(profile.email, realm_id))
        stale_keys.append(user_profile_delivery_email_cache_key(profile.delivery_email, realm_id))
        if profile.is_bot and is_cross_realm_bot_email(profile.email):
            # System bots also live in dedicated cache entries.
            stale_keys.append(bot_profile_cache_key(profile.email, realm_id))
            stale_keys.append(get_cross_realm_dicts_key())

    cache_delete_many(stale_keys)
|
2013-04-05 00:13:03 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
def delete_display_recipient_cache(user_profile: "UserProfile") -> None:
    """Flush every display_recipient cache entry this user can appear in:
    one per recipient the user is subscribed to, plus the user's own
    single-user entry."""
    from zerver.models import Subscription  # We need to import here to avoid cyclic dependency.

    stale = [
        display_recipient_cache_key(recipient_id)
        for recipient_id in Subscription.objects.filter(user_profile=user_profile).values_list(
            "recipient_id", flat=True
        )
    ]
    stale.append(single_user_display_recipient_cache_key(user_profile.id))
    cache_delete_many(stale)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-07-16 00:45:17 +02:00
|
|
|
def changed(update_fields: Optional[Sequence[str]], fields: List[str]) -> bool:
    """Return True if a save with ``update_fields`` could have touched any
    of ``fields``.

    ``update_fields is None`` means a full save (including adds/deletes),
    which must be treated as touching everything.
    """
    if update_fields is None:
        return True

    return not set(update_fields).isdisjoint(fields)
|
2019-05-16 14:48:42 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-12-15 01:16:00 +01:00
|
|
|
# Called by models/users.py to flush the user_profile cache whenever we save
|
2014-01-28 17:17:06 +01:00
|
|
|
# a user_profile object
|
2021-07-16 00:45:17 +02:00
|
|
|
def flush_user_profile(
    *,
    instance: "UserProfile",
    update_fields: Optional[Sequence[str]] = None,
    **kwargs: object,
) -> None:
    """Signal handler: flush caches affected by saving a UserProfile.

    Always drops the per-user entries; derived per-realm caches are only
    dropped when a relevant field was (possibly) modified.
    """
    profile = instance
    realm_id = profile.realm_id
    delete_user_profile_caches([profile], realm_id)

    # The realm_user_dicts cache depends on these fields, and on
    # add/delete (update_fields is None).
    if changed(update_fields, realm_user_dict_fields):
        cache_delete(realm_user_dicts_cache_key(realm_id))

    # (De)activation affects both active-user-id caches.
    if changed(update_fields, ["is_active"]):
        cache_delete(active_user_ids_cache_key(realm_id))
        cache_delete(active_non_guest_user_ids_cache_key(realm_id))

    # A role change can move a user in or out of the guest set.
    if changed(update_fields, ["role"]):
        cache_delete(active_non_guest_user_ids_cache_key(realm_id))

    if changed(update_fields, ["email", "full_name", "id", "is_mirror_dummy"]):
        delete_display_recipient_cache(profile)

    # The bot_dicts_in_realm cache only contains bots.
    if profile.is_bot and changed(update_fields, bot_dict_fields):
        cache_delete(bot_dicts_in_realm_cache_key(realm_id))
|
2014-02-26 00:12:14 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-07-16 00:45:17 +02:00
|
|
|
def flush_muting_users_cache(*, instance: "MutedUser", **kwargs: object) -> None:
    """Signal handler: drop the cached list of users muting the target of
    this MutedUser row."""
    cache_delete(get_muting_users_cache_key(instance.muted_user_id))
|
2021-03-27 13:31:26 +01:00
|
|
|
|
|
|
|
|
2023-12-15 02:14:24 +01:00
|
|
|
# Called by models/realms.py to flush various caches whenever we save
|
2014-01-28 18:18:19 +01:00
|
|
|
# a Realm object. The main tricky thing here is that Realm info is
|
|
|
|
# generally cached indirectly through user_profile objects.
|
2021-07-16 00:45:17 +02:00
|
|
|
def flush_realm(
    *,
    instance: "Realm",
    update_fields: Optional[Sequence[str]] = None,
    from_deletion: bool = False,
    **kwargs: object,
) -> None:
    """Signal handler: flush caches affected by saving (or deleting) a
    Realm.  Realm data is mostly cached indirectly via its users, so we
    always flush the per-user caches, and drop the realm-wide entries only
    when the realm is going away or being renamed (string_id change)."""
    realm = instance
    delete_user_profile_caches(realm.get_active_users(), realm.id)

    realm_invalidated = (
        from_deletion
        or realm.deactivated
        or (update_fields is not None and "string_id" in update_fields)
    )
    if realm_invalidated:
        for key in [
            realm_user_dicts_cache_key(realm.id),
            active_user_ids_cache_key(realm.id),
            bot_dicts_in_realm_cache_key(realm.id),
            realm_alert_words_cache_key(realm.id),
            realm_alert_words_automaton_cache_key(realm.id),
            active_non_guest_user_ids_cache_key(realm.id),
            realm_rendered_description_cache_key(realm),
            realm_text_description_cache_key(realm),
        ]:
            cache_delete(key)
    elif changed(update_fields, ["description"]):
        cache_delete(realm_rendered_description_cache_key(realm))
        cache_delete(realm_text_description_cache_key(realm))
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-06-08 22:17:05 +02:00
|
|
|
def realm_alert_words_cache_key(realm_id: int) -> str:
    """Cache key for a realm's alert words."""
    return "realm_alert_words:{}".format(realm_id)
|
2018-01-04 13:49:39 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-06-08 22:17:05 +02:00
|
|
|
def realm_alert_words_automaton_cache_key(realm_id: int) -> str:
    """Cache key for the automaton built from a realm's alert words."""
    return "realm_alert_words_automaton:{}".format(realm_id)
|
2019-02-11 15:19:38 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
def realm_rendered_description_cache_key(realm: "Realm") -> str:
    """Cache key for a realm's rendered (HTML) description; keyed on the
    realm's string_id (subdomain)."""
    return "realm_rendered_description:{}".format(realm.string_id)
|
2019-03-18 08:42:13 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
def realm_text_description_cache_key(realm: "Realm") -> str:
    """Cache key for a realm's plain-text description; keyed on the realm's
    string_id (subdomain)."""
    return "realm_text_description:{}".format(realm.string_id)
|
2019-04-24 04:30:15 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-12-15 03:57:04 +01:00
|
|
|
# Called by models/streams.py to flush the stream cache whenever we save a stream
|
2014-01-15 22:48:27 +01:00
|
|
|
# object.
|
2021-07-16 00:45:17 +02:00
|
|
|
def flush_stream(
    *,
    instance: "Stream",
    update_fields: Optional[Sequence[str]] = None,
    **kwargs: object,
) -> None:
    """Signal handler: flush caches affected by saving a Stream.

    Only the bot_dicts_in_realm cache can be affected, and only by a
    full save or a rename of a stream some bot uses as a default stream.
    """
    from zerver.models import UserProfile

    stream = instance

    if update_fields is not None:
        if "name" not in update_fields:
            return
        # Renames only matter if some bot references this stream as a
        # default; checked second to keep the short-circuit order (no DB
        # query unless the name actually changed).
        if not UserProfile.objects.filter(
            Q(default_sending_stream=stream) | Q(default_events_register_stream=stream)
        ).exists():
            return

    cache_delete(bot_dicts_in_realm_cache_key(stream.realm_id))
|
2016-10-04 15:40:02 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-07-16 00:45:17 +02:00
|
|
|
def flush_used_upload_space_cache(
    *,
    instance: "Attachment",
    created: bool = True,
    **kwargs: object,
) -> None:
    """Signal handler: drop the owning realm's used-upload-space total when
    a new Attachment is created (edits don't change the total)."""
    if not created:
        return

    attachment = instance
    cache_delete(get_realm_used_upload_space_cache_key(attachment.owner.realm_id))
|
2019-01-14 07:46:31 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def to_dict_cache_key_id(message_id: int) -> str:
    """Cache key for a message's to_dict representation, by message id."""
    return "message_dict:{}".format(message_id)
|
2016-10-04 15:40:02 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
def to_dict_cache_key(message: "Message", realm_id: Optional[int] = None) -> str:
    """Cache key for a Message's to_dict representation.

    NOTE(review): realm_id is accepted but unused -- the key is derived
    only from message.id; presumably it exists for caller signature
    compatibility. Confirm before removing.
    """
    return to_dict_cache_key_id(message.id)
|
2016-10-04 15:40:02 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-10-30 01:21:40 +01:00
|
|
|
def open_graph_description_cache_key(content: bytes, request: HttpRequest) -> str:
|
2023-07-19 00:44:51 +02:00
|
|
|
return "open_graph_description_path:{}".format(
|
|
|
|
hashlib.sha1(request.META["PATH_INFO"].encode()).hexdigest()
|
|
|
|
)
|
2019-01-27 07:18:19 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-07-16 00:45:17 +02:00
|
|
|
def flush_message(*, instance: "Message", **kwargs: object) -> None:
    """Signal handler: drop the to_dict cache entry for a saved Message."""
    cache_delete(to_dict_cache_key_id(instance.id))
|
2018-01-12 08:57:10 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-07-16 00:45:17 +02:00
|
|
|
def flush_submessage(*, instance: "SubMessage", **kwargs: object) -> None:
    """Signal handler for SubMessage saves.

    SubMessages are not cached on their own; they are embedded in their
    parent message's to_dict cache entry, so that entry is flushed.
    """
    cache_delete(to_dict_cache_key_id(instance.message_id))
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-06-26 20:21:11 +02:00
|
|
|
class IgnoreUnhashableLruCacheWrapper(Generic[ParamT, ReturnT]):
    """Wraps an lru_cache-decorated function so that calls whose arguments
    are unhashable fall back to calling the function directly (uncached),
    and so the cache is cleared whenever the module-level KEY_PREFIX
    changes (per the in-code note, used by tests)."""

    def __init__(
        self, function: Callable[ParamT, ReturnT], cached_function: "_lru_cache_wrapper[ReturnT]"
    ) -> None:
        # Snapshot of KEY_PREFIX taken at construction; compared on each
        # call to detect a prefix bump.
        self.key_prefix = KEY_PREFIX
        # The raw (uncached) function, used as the fallback path.
        self.function = function
        # The lru_cache-wrapped version of the same function.
        self.cached_function = cached_function
        # Re-expose the standard lru_cache management API.
        self.cache_info = cached_function.cache_info
        self.cache_clear = cached_function.cache_clear

    def __call__(self, *args: ParamT.args, **kwargs: ParamT.kwargs) -> ReturnT:
        if settings.DEVELOPMENT and not settings.TEST_SUITE:  # nocoverage
            # In the development environment, we want every file
            # change to refresh the source files from disk.
            return self.function(*args, **kwargs)

        if self.key_prefix != KEY_PREFIX:
            # Clear cache when cache.KEY_PREFIX changes. This is used in
            # tests.
            self.cache_clear()
            self.key_prefix = KEY_PREFIX

        try:
            return self.cached_function(
                *args,
                **kwargs,  # type: ignore[arg-type] # might be unhashable
            )
        except TypeError:
            # args or kwargs contains an element which is unhashable. In
            # this case we don't cache the result.
            pass

        # Deliberately calling this function from outside of exception
        # handler to get a more descriptive traceback. Otherwise traceback
        # can include the exception from cached_function as well.
        return self.function(*args, **kwargs)
|
|
|
|
|
|
|
|
|
2021-02-15 23:56:05 +01:00
|
|
|
def ignore_unhashable_lru_cache(
    maxsize: int = 128, typed: bool = False
) -> Callable[[Callable[ParamT, ReturnT]], IgnoreUnhashableLruCacheWrapper[ParamT, ReturnT]]:
    """
    This is a wrapper over lru_cache function. It adds following features on
    top of lru_cache:

        * It will not cache result of functions with unhashable arguments.
        * It will clear cache whenever zerver.lib.cache.KEY_PREFIX changes.
    """
    lru_decorator = lru_cache(maxsize=maxsize, typed=typed)

    def decorator(
        user_function: Callable[ParamT, ReturnT],
    ) -> IgnoreUnhashableLruCacheWrapper[ParamT, ReturnT]:
        # Pair the raw function with its lru_cache-wrapped twin; the
        # wrapper decides per-call which one to use.
        return IgnoreUnhashableLruCacheWrapper(user_function, lru_decorator(user_function))

    return decorator
|
2019-04-18 04:35:14 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-04-18 04:35:14 +02:00
|
|
|
def dict_to_items_tuple(user_function: Callable[..., Any]) -> Callable[..., Any]:
    """Wrapper that converts any dict positional args to sorted item tuples.

    Keyword arguments are passed through unchanged.
    """

    def freeze(value: Any) -> Any:
        # Dicts become deterministic (sorted) tuples of items; anything
        # else is untouched.
        return tuple(sorted(value.items())) if isinstance(value, dict) else value

    def wrapper(*args: Any, **kwargs: Any) -> Any:
        return user_function(*(freeze(arg) for arg in args), **kwargs)

    return wrapper
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-04-18 04:35:14 +02:00
|
|
|
def items_tuple_to_dict(user_function: Callable[..., Any]) -> Callable[..., Any]:
    """Wrapper that converts tuple-of-pairs args (positional and keyword)
    back into dicts; tuples that dict() cannot interpret pass through
    unchanged (the inverse of dict_to_items_tuple)."""

    def thaw(value: Any) -> Any:
        if not isinstance(value, tuple):
            return value
        try:
            return dict(value)
        except TypeError:
            # Not a tuple of key/value pairs; leave it as-is.
            return value

    def wrapper(*args: Any, **kwargs: Any) -> Any:
        return user_function(
            *(thaw(arg) for arg in args),
            **{name: thaw(value) for name, value in kwargs.items()},
        )

    return wrapper
|