# See https://zulip.readthedocs.io/en/latest/subsystems/caching.html for docs
from functools import wraps

from django.utils.lru_cache import lru_cache
from django.core.cache import cache as djcache
from django.core.cache import caches
from django.conf import settings
from django.db.models import Q
from django.core.cache.backends.base import BaseCache
from django.http import HttpRequest

from typing import Any, Callable, Dict, Iterable, List, \
    Optional, Sequence, TypeVar, Tuple, TYPE_CHECKING

from zerver.lib.utils import statsd, statsd_key, make_safe_digest

import time
import base64
import logging
import random
import re
import sys
import traceback
import os
import hashlib

if TYPE_CHECKING:
    # These modules have to be imported for type annotations but
    # they cannot be imported at runtime due to cyclic dependency.
    from zerver.models import UserProfile, Realm, Message

MEMCACHED_MAX_KEY_LENGTH = 250

ReturnT = TypeVar('ReturnT')  # Useful for matching return types via Callable[..., ReturnT]

logger = logging.getLogger()

class NotFoundInCache(Exception):
    pass

remote_cache_time_start = 0.0
remote_cache_total_time = 0.0
remote_cache_total_requests = 0

def get_remote_cache_time() -> float:
    return remote_cache_total_time

def get_remote_cache_requests() -> int:
    return remote_cache_total_requests

def remote_cache_stats_start() -> None:
    global remote_cache_time_start
    remote_cache_time_start = time.time()

def remote_cache_stats_finish() -> None:
    global remote_cache_total_time
    global remote_cache_total_requests
    global remote_cache_time_start
    remote_cache_total_requests += 1
    remote_cache_total_time += (time.time() - remote_cache_time_start)

def get_or_create_key_prefix() -> str:
    if settings.CASPER_TESTS:
        # This sets the prefix for the benefit of the Casper tests.
        #
        # Having a fixed key is OK since we don't support running
        # multiple copies of the casper tests at the same time anyway.
        return 'casper_tests:'
    elif settings.TEST_SUITE:
        # The Python tests overwrite KEY_PREFIX on each test, but use
        # this codepath as well, just to save running the more complex
        # code below for reading the normal key prefix.
        return 'django_tests_unused:'

    # directory `var` should exist in production
    os.makedirs(os.path.join(settings.DEPLOY_ROOT, "var"), exist_ok=True)

    filename = os.path.join(settings.DEPLOY_ROOT, "var", "remote_cache_prefix")
    try:
        fd = os.open(filename, os.O_CREAT | os.O_EXCL | os.O_RDWR, 0o444)
        random_hash = hashlib.sha256(str(random.getrandbits(256)).encode('utf-8')).digest()
        prefix = base64.b16encode(random_hash)[:32].decode('utf-8').lower() + ':'
        # This does close the underlying file
        with os.fdopen(fd, 'w') as f:
            f.write(prefix + "\n")
    except OSError:
        # The file already exists
        tries = 1
        while tries < 10:
            with open(filename, 'r') as f:
                prefix = f.readline()[:-1]
            if len(prefix) == 33:
                break
            tries += 1
            prefix = ''
            time.sleep(0.5)

    if not prefix:
        print("Could not read remote cache key prefix file")
        sys.exit(1)

    return prefix

KEY_PREFIX = get_or_create_key_prefix()  # type: str

def bounce_key_prefix_for_testing(test_name: str) -> None:
    global KEY_PREFIX
    KEY_PREFIX = test_name + ':' + str(os.getpid()) + ':'
    # We are taking the hash of the KEY_PREFIX to decrease the size of the key.
    # Memcached keys should have a length of less than 250.
    KEY_PREFIX = hashlib.sha1(KEY_PREFIX.encode('utf-8')).hexdigest() + ":"

def get_cache_backend(cache_name: Optional[str]) -> BaseCache:
    if cache_name is None:
        return djcache
    return caches[cache_name]

def get_cache_with_key(
        keyfunc: Callable[..., str],
        cache_name: Optional[str]=None
) -> Callable[[Callable[..., ReturnT]], Callable[..., ReturnT]]:
    """
    Reads the value from the cache, using the same key scheme as
    cache_with_key.  Since a cached value may contain any data,
    including None, a missing key is signaled by raising
    NotFoundInCache rather than by returning None.
    """
    def decorator(func: Callable[..., ReturnT]) -> Callable[..., ReturnT]:
        @wraps(func)
        def func_with_caching(*args: Any, **kwargs: Any) -> ReturnT:
            key = keyfunc(*args, **kwargs)
            try:
                val = cache_get(key, cache_name=cache_name)
            except InvalidCacheKeyException:
                stack_trace = traceback.format_exc()
                log_invalid_cache_keys(stack_trace, [key])
                val = None

            if val is not None:
                return val[0]
            raise NotFoundInCache()

        return func_with_caching

    return decorator
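
# A minimal usage sketch (the key function and the decorated function
# here are hypothetical, not part of this module).  The decorated
# function's body is never executed; only the cache lookup matters,
# and a miss surfaces as NotFoundInCache:
#
#     @get_cache_with_key(lambda user_id: "example_value:%s" % (user_id,))
#     def get_cached_example_value(user_id: int) -> Any:
#         ...  # unreachable; present only to carry the signature
#
#     try:
#         value = get_cached_example_value(17)
#     except NotFoundInCache:
#         value = None  # or recompute and cache_set() it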

def cache_with_key(
        keyfunc: Callable[..., str], cache_name: Optional[str]=None,
        timeout: Optional[int]=None, with_statsd_key: Optional[str]=None
) -> Callable[[Callable[..., ReturnT]], Callable[..., ReturnT]]:
    """Decorator which applies Django caching to a function.

    Decorator argument is a function which computes a cache key
    from the original function's arguments.  You are responsible
    for avoiding collisions with other uses of this decorator or
    other uses of caching."""

    def decorator(func: Callable[..., ReturnT]) -> Callable[..., ReturnT]:
        @wraps(func)
        def func_with_caching(*args: Any, **kwargs: Any) -> ReturnT:
            key = keyfunc(*args, **kwargs)

            try:
                val = cache_get(key, cache_name=cache_name)
            except InvalidCacheKeyException:
                stack_trace = traceback.format_exc()
                log_invalid_cache_keys(stack_trace, [key])
                return func(*args, **kwargs)

            extra = ""
            if cache_name == 'database':
                extra = ".dbcache"

            if with_statsd_key is not None:
                metric_key = with_statsd_key
            else:
                metric_key = statsd_key(key)

            status = "hit" if val is not None else "miss"
            statsd.incr("cache%s.%s.%s" % (extra, metric_key, status))

            # Values are singleton tuples so that we can distinguish
            # a result of None from a missing key.
            if val is not None:
                return val[0]

            val = func(*args, **kwargs)

            cache_set(key, val, cache_name=cache_name, timeout=timeout)

            return val

        return func_with_caching

    return decorator
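
# A minimal usage sketch (the key function and cached function are
# hypothetical, not part of this module):
#
#     def example_value_cache_key(realm_id: int) -> str:
#         return "example_value:%s" % (realm_id,)
#
#     @cache_with_key(example_value_cache_key, timeout=3600 * 24)
#     def get_example_value(realm_id: int) -> int:
#         ...  # expensive database query; runs only on a cache miss
#
# The first call computes and stores the value; later calls with the
# same realm_id are served from the cache until the timeout expires.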

class InvalidCacheKeyException(Exception):
    pass

def log_invalid_cache_keys(stack_trace: str, keys: List[str]) -> None:
    logger.warning(
        "Invalid cache key used: {}\nStack trace: {}\n".format(keys, stack_trace)
    )

def validate_cache_key(key: str) -> None:
    if not key.startswith(KEY_PREFIX):
        key = KEY_PREFIX + key

    # Theoretically memcached can handle non-ascii characters
    # and only "control" characters are strictly disallowed; see:
    # https://github.com/memcached/memcached/blob/master/doc/protocol.txt
    # However, limiting the characters we allow in keys simplifies things,
    # and anyway we use make_safe_digest when forming some keys to ensure
    # the resulting keys fit the regex below.
    # The regex checks "all characters between ! and ~ in the ascii table",
    # which happens to be the set of all "nice" ascii characters.
    if not bool(re.fullmatch(r"([!-~])+", key)):
        raise InvalidCacheKeyException("Invalid characters in the cache key: " + key)
    if len(key) > MEMCACHED_MAX_KEY_LENGTH:
        raise InvalidCacheKeyException("Cache key too long: {} Length: {}".format(key, len(key)))
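
# For illustration (hypothetical keys): "user_profile_by_id:42" passes
# validation, while "bad key" fails (the space falls outside [!-~]), as
# does any key that exceeds MEMCACHED_MAX_KEY_LENGTH once prefixed.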

def cache_set(key: str, val: Any, cache_name: Optional[str]=None, timeout: Optional[int]=None) -> None:
    final_key = KEY_PREFIX + key
    validate_cache_key(final_key)

    remote_cache_stats_start()
    cache_backend = get_cache_backend(cache_name)
    cache_backend.set(final_key, (val,), timeout=timeout)
    remote_cache_stats_finish()

def cache_get(key: str, cache_name: Optional[str]=None) -> Any:
    final_key = KEY_PREFIX + key
    validate_cache_key(final_key)

    remote_cache_stats_start()
    cache_backend = get_cache_backend(cache_name)
    ret = cache_backend.get(final_key)
    remote_cache_stats_finish()
    return ret

def cache_get_many(keys: List[str], cache_name: Optional[str]=None) -> Dict[str, Any]:
    keys = [KEY_PREFIX + key for key in keys]
    for key in keys:
        validate_cache_key(key)
    remote_cache_stats_start()
    ret = get_cache_backend(cache_name).get_many(keys)
    remote_cache_stats_finish()
    return dict([(key[len(KEY_PREFIX):], value) for key, value in ret.items()])

def safe_cache_get_many(keys: List[str], cache_name: Optional[str]=None) -> Dict[str, Any]:
    """Variant of cache_get_many that drops any keys that fail
    validation, rather than throwing an exception visible to the
    caller."""
    try:
        # Almost always the keys will all be correct, so we just try
        # to do normal cache_get_many to avoid the overhead of
        # validating all the keys here.
        return cache_get_many(keys, cache_name)
    except InvalidCacheKeyException:
        stack_trace = traceback.format_exc()
        good_keys, bad_keys = filter_good_and_bad_keys(keys)

        log_invalid_cache_keys(stack_trace, bad_keys)
        return cache_get_many(good_keys, cache_name)

def cache_set_many(items: Dict[str, Any], cache_name: Optional[str]=None,
                   timeout: Optional[int]=None) -> None:
    new_items = {}
    for key in items:
        new_key = KEY_PREFIX + key
        validate_cache_key(new_key)
        new_items[new_key] = items[key]
    items = new_items
    remote_cache_stats_start()
    get_cache_backend(cache_name).set_many(items, timeout=timeout)
    remote_cache_stats_finish()

def safe_cache_set_many(items: Dict[str, Any], cache_name: Optional[str]=None,
                        timeout: Optional[int]=None) -> None:
    """Variant of cache_set_many that drops saving any keys that fail
    validation, rather than throwing an exception visible to the
    caller."""
    try:
        # Almost always the keys will all be correct, so we just try
        # to do normal cache_set_many to avoid the overhead of
        # validating all the keys here.
        return cache_set_many(items, cache_name, timeout)
    except InvalidCacheKeyException:
        stack_trace = traceback.format_exc()

        good_keys, bad_keys = filter_good_and_bad_keys(list(items.keys()))
        log_invalid_cache_keys(stack_trace, bad_keys)

        good_items = dict((key, items[key]) for key in good_keys)
        return cache_set_many(good_items, cache_name, timeout)

def cache_delete(key: str, cache_name: Optional[str]=None) -> None:
    final_key = KEY_PREFIX + key
    validate_cache_key(final_key)

    remote_cache_stats_start()
    get_cache_backend(cache_name).delete(final_key)
    remote_cache_stats_finish()

def cache_delete_many(items: Iterable[str], cache_name: Optional[str]=None) -> None:
    keys = [KEY_PREFIX + item for item in items]
    for key in keys:
        validate_cache_key(key)
    remote_cache_stats_start()
    get_cache_backend(cache_name).delete_many(keys)
    remote_cache_stats_finish()

def filter_good_and_bad_keys(keys: List[str]) -> Tuple[List[str], List[str]]:
    good_keys = []
    bad_keys = []
    for key in keys:
        try:
            validate_cache_key(key)
            good_keys.append(key)
        except InvalidCacheKeyException:
            bad_keys.append(key)

    return good_keys, bad_keys

# generic_bulk_cached_fetch and its helpers.  We start by declaring
# a few type variables that help define its interface.

# Type for the cache's keys; will typically be int or str.
ObjKT = TypeVar('ObjKT')

# Type for items to be fetched from the database (e.g. a Django model object).
ItemT = TypeVar('ItemT')

# Type for items to be stored in the cache (e.g. a dictionary serialization).
# Will equal ItemT unless a cache_transformer is specified.
CacheItemT = TypeVar('CacheItemT')

# Type for compressed items for storage in the cache.  For
# serializable objects, will be the object; if encoded, bytes.
CompressedItemT = TypeVar('CompressedItemT')

def default_extractor(obj: CompressedItemT) -> ItemT:
    return obj  # type: ignore # Need a type assert that ItemT=CompressedItemT

def default_setter(obj: ItemT) -> CompressedItemT:
    return obj  # type: ignore # Need a type assert that ItemT=CompressedItemT

def default_id_fetcher(obj: ItemT) -> ObjKT:
    return obj.id  # type: ignore # Need ItemT/CompressedItemT typevars to be a Django protocol

def default_cache_transformer(obj: ItemT) -> CacheItemT:
    return obj  # type: ignore # Need a type assert that ItemT=CacheItemT

# Required arguments are as follows:
# * object_ids: The list of object ids to look up
# * cache_key_function: object_id => cache key
# * query_function: [object_ids] => [objects from database]
# Optional keyword arguments:
# * setter: Function to call before storing items to cache (e.g. compression)
# * extractor: Function to call on items returned from cache
#   (e.g. decompression).  Should be the inverse of the setter
#   function.
# * id_fetcher: Function mapping an object from database => object_id
#   (in case we're using a key more complex than obj.id)
# * cache_transformer: Function mapping an object from database =>
#   value for cache (in case the values that we're caching are some
#   function of the objects, not the objects themselves)

def generic_bulk_cached_fetch(
        cache_key_function: Callable[[ObjKT], str],
        query_function: Callable[[List[ObjKT]], Iterable[ItemT]],
        object_ids: Sequence[ObjKT],
        extractor: Callable[[CompressedItemT], CacheItemT] = default_extractor,
        setter: Callable[[CacheItemT], CompressedItemT] = default_setter,
        id_fetcher: Callable[[ItemT], ObjKT] = default_id_fetcher,
        cache_transformer: Callable[[ItemT], CacheItemT] = default_cache_transformer,
) -> Dict[ObjKT, CacheItemT]:
    if len(object_ids) == 0:
        # Nothing to fetch.
        return {}

    cache_keys = {}  # type: Dict[ObjKT, str]
    for object_id in object_ids:
        cache_keys[object_id] = cache_key_function(object_id)

    cached_objects_compressed = safe_cache_get_many([cache_keys[object_id]
                                                     for object_id in object_ids])  # type: Dict[str, Tuple[CompressedItemT]]

    cached_objects = {}  # type: Dict[str, CacheItemT]
    for (key, val) in cached_objects_compressed.items():
        cached_objects[key] = extractor(val[0])
    needed_ids = [object_id for object_id in object_ids if
                  cache_keys[object_id] not in cached_objects]

    # Only call query_function if there are some ids to fetch from the database:
    if len(needed_ids) > 0:
        db_objects = query_function(needed_ids)
    else:
        db_objects = []

    items_for_remote_cache = {}  # type: Dict[str, Tuple[CompressedItemT]]
    for obj in db_objects:
        key = cache_keys[id_fetcher(obj)]
        item = cache_transformer(obj)
        items_for_remote_cache[key] = (setter(item),)
        cached_objects[key] = item
    if len(items_for_remote_cache) > 0:
        safe_cache_set_many(items_for_remote_cache)
    return dict((object_id, cached_objects[cache_keys[object_id]]) for object_id in object_ids
                if cache_keys[object_id] in cached_objects)
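
# A minimal usage sketch (the key function and query function are
# hypothetical, not part of this module):
#
#     def example_thing_cache_key(thing_id: int) -> str:
#         return "example_thing:%s" % (thing_id,)
#
#     def fetch_example_things(thing_ids: List[int]) -> List[Any]:
#         ...  # e.g. Thing.objects.filter(id__in=thing_ids)
#
#     things = generic_bulk_cached_fetch(example_thing_cache_key,
#                                        fetch_example_things,
#                                        [1, 2, 3])
#
# Cached ids are served from memcached; only the misses hit the
# database, and those rows are written back to the cache for next time.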

def preview_url_cache_key(url: str) -> str:
    return "preview_url:%s" % (make_safe_digest(url),)

def display_recipient_cache_key(recipient_id: int) -> str:
    return "display_recipient_dict:%d" % (recipient_id,)

def display_recipient_bulk_get_users_by_id_cache_key(user_id: int) -> str:
    # Cache key function for a function for bulk fetching users, used internally
    # by display_recipient code.
    return 'bulk_fetch_display_recipients:' + user_profile_by_id_cache_key(user_id)

def user_profile_by_email_cache_key(email: str) -> str:
    # See the comment in zerver/lib/avatar_hash.py:gravatar_hash for why we
    # are proactively encoding email addresses even though they will
    # with high likelihood be ASCII-only for the foreseeable future.
    return 'user_profile_by_email:%s' % (make_safe_digest(email.strip()),)

def user_profile_cache_key_id(email: str, realm_id: int) -> str:
    return u"user_profile:%s:%s" % (make_safe_digest(email.strip()), realm_id,)

def user_profile_cache_key(email: str, realm: 'Realm') -> str:
    return user_profile_cache_key_id(email, realm.id)

def bot_profile_cache_key(email: str) -> str:
    return "bot_profile:%s" % (make_safe_digest(email.strip()),)

def user_profile_by_id_cache_key(user_profile_id: int) -> str:
    return "user_profile_by_id:%s" % (user_profile_id,)

def user_profile_by_api_key_cache_key(api_key: str) -> str:
    return "user_profile_by_api_key:%s" % (api_key,)

realm_user_dict_fields = [
    'id', 'full_name', 'short_name', 'email',
    'avatar_source', 'avatar_version', 'is_active',
    'role', 'is_bot', 'realm_id', 'timezone',
    'date_joined', 'bot_owner_id', 'delivery_email',
    'bot_type'
]  # type: List[str]

def realm_user_dicts_cache_key(realm_id: int) -> str:
    return "realm_user_dicts:%s" % (realm_id,)

def get_realm_used_upload_space_cache_key(realm: 'Realm') -> str:
    return u'realm_used_upload_space:%s' % (realm.id,)

def active_user_ids_cache_key(realm_id: int) -> str:
    return "active_user_ids:%s" % (realm_id,)

def active_non_guest_user_ids_cache_key(realm_id: int) -> str:
    return "active_non_guest_user_ids:%s" % (realm_id,)

bot_dict_fields = ['id', 'full_name', 'short_name', 'bot_type', 'email',
                   'is_active', 'default_sending_stream__name',
                   'realm_id',
                   'default_events_register_stream__name',
                   'default_all_public_streams', 'api_key',
                   'bot_owner__email', 'avatar_source',
                   'avatar_version']  # type: List[str]

def bot_dicts_in_realm_cache_key(realm: 'Realm') -> str:
    return "bot_dicts_in_realm:%s" % (realm.id,)

def get_stream_cache_key(stream_name: str, realm_id: int) -> str:
    return "stream_by_realm_and_name:%s:%s" % (
        realm_id, make_safe_digest(stream_name.strip().lower()))

def delete_user_profile_caches(user_profiles: Iterable['UserProfile']) -> None:
    # Imported here to avoid cyclic dependency.
    from zerver.lib.users import get_all_api_keys
    from zerver.models import is_cross_realm_bot_email
    keys = []
    for user_profile in user_profiles:
        keys.append(user_profile_by_email_cache_key(user_profile.delivery_email))
        keys.append(user_profile_by_id_cache_key(user_profile.id))
        for api_key in get_all_api_keys(user_profile):
            keys.append(user_profile_by_api_key_cache_key(api_key))
        keys.append(user_profile_cache_key(user_profile.email, user_profile.realm))
        if user_profile.is_bot and is_cross_realm_bot_email(user_profile.email):
            # Handle clearing system bots from their special cache.
            keys.append(bot_profile_cache_key(user_profile.email))

    cache_delete_many(keys)

def delete_display_recipient_cache(user_profile: 'UserProfile') -> None:
    from zerver.models import Subscription  # We need to import here to avoid cyclic dependency.
    recipient_ids = Subscription.objects.filter(user_profile=user_profile)
    recipient_ids = recipient_ids.values_list('recipient_id', flat=True)
    keys = [display_recipient_cache_key(rid) for rid in recipient_ids]
    keys.append(display_recipient_bulk_get_users_by_id_cache_key(user_profile.id))
    cache_delete_many(keys)

def changed(kwargs: Any, fields: List[str]) -> bool:
    if kwargs.get('update_fields') is None:
        # adds/deletes should invalidate the cache
        return True

    update_fields = set(kwargs['update_fields'])
    for f in fields:
        if f in update_fields:
            return True

    return False

# Called by models.py to flush the user_profile cache whenever we save
# a user_profile object
def flush_user_profile(sender: Any, **kwargs: Any) -> None:
    user_profile = kwargs['instance']
    delete_user_profile_caches([user_profile])

    # Invalidate our active_users_in_realm info dict if any user has changed
    # the fields in the dict or become (in)active
    if changed(kwargs, realm_user_dict_fields):
        cache_delete(realm_user_dicts_cache_key(user_profile.realm_id))

    if changed(kwargs, ['is_active']):
        cache_delete(active_user_ids_cache_key(user_profile.realm_id))
        cache_delete(active_non_guest_user_ids_cache_key(user_profile.realm_id))

    if changed(kwargs, ['role']):
        cache_delete(active_non_guest_user_ids_cache_key(user_profile.realm_id))

    if changed(kwargs, ['email', 'full_name', 'short_name', 'id', 'is_mirror_dummy']):
        delete_display_recipient_cache(user_profile)

    # Invalidate our bots_in_realm info dict if any bot has
    # changed the fields in the dict or become (in)active
    if user_profile.is_bot and changed(kwargs, bot_dict_fields):
        cache_delete(bot_dicts_in_realm_cache_key(user_profile.realm))

    # Invalidate realm-wide alert words cache if any user in the realm has changed
    # alert words
    if changed(kwargs, ['alert_words']):
        cache_delete(realm_alert_words_cache_key(user_profile.realm))
        cache_delete(realm_alert_words_automaton_cache_key(user_profile.realm))

# Called by models.py to flush various caches whenever we save
# a Realm object.  The main tricky thing here is that Realm info is
# generally cached indirectly through user_profile objects.
def flush_realm(sender: Any, **kwargs: Any) -> None:
    realm = kwargs['instance']
    users = realm.get_active_users()
    delete_user_profile_caches(users)

    if realm.deactivated or (kwargs["update_fields"] is not None and
                             "string_id" in kwargs['update_fields']):
        cache_delete(realm_user_dicts_cache_key(realm.id))
        cache_delete(active_user_ids_cache_key(realm.id))
        cache_delete(bot_dicts_in_realm_cache_key(realm))
        cache_delete(realm_alert_words_cache_key(realm))
        cache_delete(realm_alert_words_automaton_cache_key(realm))
        cache_delete(active_non_guest_user_ids_cache_key(realm.id))
        cache_delete(realm_rendered_description_cache_key(realm))
        cache_delete(realm_text_description_cache_key(realm))

    if changed(kwargs, ['description']):
        cache_delete(realm_rendered_description_cache_key(realm))
        cache_delete(realm_text_description_cache_key(realm))

def realm_alert_words_cache_key(realm: 'Realm') -> str:
    return "realm_alert_words:%s" % (realm.string_id,)

def realm_alert_words_automaton_cache_key(realm: 'Realm') -> str:
    return "realm_alert_words_automaton:%s" % (realm.string_id,)

def realm_rendered_description_cache_key(realm: 'Realm') -> str:
    return "realm_rendered_description:%s" % (realm.string_id,)

def realm_text_description_cache_key(realm: 'Realm') -> str:
    return "realm_text_description:%s" % (realm.string_id,)

# Called by models.py to flush the stream cache whenever we save a stream
# object.
def flush_stream(sender: Any, **kwargs: Any) -> None:
    from zerver.models import UserProfile
    stream = kwargs['instance']
    items_for_remote_cache = {}
    items_for_remote_cache[get_stream_cache_key(stream.name, stream.realm_id)] = (stream,)
    cache_set_many(items_for_remote_cache)

    # The bot_dicts_in_realm cache includes the names of each bot's
    # default streams, so flush it if this stream's name may have
    # changed and some bot uses the stream as a default.
    if (kwargs.get('update_fields') is None or 'name' in kwargs['update_fields']) and \
            UserProfile.objects.filter(
                Q(default_sending_stream=stream) |
                Q(default_events_register_stream=stream)).exists():
        cache_delete(bot_dicts_in_realm_cache_key(stream.realm))

def flush_used_upload_space_cache(sender: Any, **kwargs: Any) -> None:
    attachment = kwargs['instance']

    if kwargs.get("created") is None or kwargs.get("created") is True:
        cache_delete(get_realm_used_upload_space_cache_key(attachment.owner.realm))

def to_dict_cache_key_id(message_id: int) -> str:
    return 'message_dict:%d' % (message_id,)

def to_dict_cache_key(message: 'Message') -> str:
    return to_dict_cache_key_id(message.id)

def open_graph_description_cache_key(content: Any, request: HttpRequest) -> str:
    return 'open_graph_description_path:%s' % (make_safe_digest(request.META['PATH_INFO']),)

def flush_message(sender: Any, **kwargs: Any) -> None:
    message = kwargs['instance']
    cache_delete(to_dict_cache_key_id(message.id))

def flush_submessage(sender: Any, **kwargs: Any) -> None:
    submessage = kwargs['instance']
    # submessages are not cached directly, they are part of their
    # parent messages
    message_id = submessage.message_id
    cache_delete(to_dict_cache_key_id(message_id))

DECORATOR = Callable[[Callable[..., Any]], Callable[..., Any]]

def ignore_unhashable_lru_cache(maxsize: int=128, typed: bool=False) -> DECORATOR:
    """
    This is a wrapper over the lru_cache function.  It adds the
    following features on top of lru_cache:

    * It will not cache the result of functions with unhashable arguments.
    * It will clear the cache whenever zerver.lib.cache.KEY_PREFIX changes.
    """
    internal_decorator = lru_cache(maxsize=maxsize, typed=typed)

    def decorator(user_function: Callable[..., Any]) -> Callable[..., Any]:
        if settings.DEVELOPMENT and not settings.TEST_SUITE:  # nocoverage
            # In the development environment, we want every file
            # change to refresh the source files from disk.
            return user_function
        cache_enabled_user_function = internal_decorator(user_function)

        def wrapper(*args: Any, **kwargs: Any) -> Any:
            if not hasattr(cache_enabled_user_function, 'key_prefix'):
                cache_enabled_user_function.key_prefix = KEY_PREFIX

            if cache_enabled_user_function.key_prefix != KEY_PREFIX:
                # Clear the cache when cache.KEY_PREFIX changes.  This
                # is used in tests.
                cache_enabled_user_function.cache_clear()
                cache_enabled_user_function.key_prefix = KEY_PREFIX

            try:
                return cache_enabled_user_function(*args, **kwargs)
            except TypeError:
                # args or kwargs contains an element which is unhashable.  In
                # this case we don't cache the result.
                pass

            # Deliberately calling this function from outside of the
            # exception handler to get a more descriptive traceback.
            # Otherwise the traceback can include the exception from
            # cache_enabled_user_function as well.
            return user_function(*args, **kwargs)

        setattr(wrapper, 'cache_info', cache_enabled_user_function.cache_info)
        setattr(wrapper, 'cache_clear', cache_enabled_user_function.cache_clear)
        return wrapper

    return decorator
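
# A minimal usage sketch (the decorated function is hypothetical, not
# part of this module):
#
#     @ignore_unhashable_lru_cache(maxsize=None)
#     def render_example_fragment(name: str) -> str:
#         ...  # expensive in-process computation
#
# Hashable arguments are memoized in-process via lru_cache; an
# unhashable argument (e.g. a dict) silently bypasses the cache, and
# the memoized results are discarded whenever KEY_PREFIX is bounced.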

def dict_to_items_tuple(user_function: Callable[..., Any]) -> Callable[..., Any]:
    """Wrapper that converts any dict args to dict item tuples."""
    def dict_to_tuple(arg: Any) -> Any:
        if isinstance(arg, dict):
            return tuple(sorted(arg.items()))
        return arg

    def wrapper(*args: Any, **kwargs: Any) -> Any:
        new_args = (dict_to_tuple(arg) for arg in args)
        return user_function(*new_args, **kwargs)

    return wrapper

def items_tuple_to_dict(user_function: Callable[..., Any]) -> Callable[..., Any]:
    """Wrapper that converts any dict items tuple args to dicts."""
    def dict_items_to_dict(arg: Any) -> Any:
        if isinstance(arg, tuple):
            try:
                return dict(arg)
            except TypeError:
                pass
        return arg

    def wrapper(*args: Any, **kwargs: Any) -> Any:
        new_args = (dict_items_to_dict(arg) for arg in args)
        new_kwargs = {key: dict_items_to_dict(val) for key, val in kwargs.items()}
        return user_function(*new_args, **new_kwargs)

    return wrapper
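
# A minimal sketch of how these two wrappers compose (the decorated
# function is hypothetical, not part of this module): the outer wrapper
# makes dict arguments hashable before they reach lru_cache, and the
# inner one restores them for the real function.
#
#     @dict_to_items_tuple
#     @ignore_unhashable_lru_cache(maxsize=None)
#     @items_tuple_to_dict
#     def lookup_example(options: Dict[str, Any]) -> str:
#         ...  # sees a real dict, but is cached on the tuple form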