# See https://zulip.readthedocs.io/en/latest/subsystems/caching.html for docs
import datetime
import logging
from typing import Any, Callable, Dict, Iterable, List, Tuple

from django.conf import settings
from django.contrib.sessions.models import Session
from django.utils.timezone import now as timezone_now

# This file needs to be different from cache.py because cache.py
# cannot import anything from zerver.models or we'd have an import
# loop
from analytics.models import RealmCount
from zerver.lib.cache import (
    cache_set_many,
    get_remote_cache_requests,
    get_remote_cache_time,
    get_stream_cache_key,
    user_profile_by_api_key_cache_key,
    user_profile_cache_key,
)
from zerver.lib.sessions import session_engine
from zerver.lib.users import get_all_api_keys
from zerver.models import (
    Client,
    Huddle,
    Stream,
    UserProfile,
    get_client_cache_key,
    huddle_hash_cache_key,
)


def user_cache_items(
    items_for_remote_cache: Dict[str, Tuple[UserProfile]], user_profile: UserProfile
) -> None:
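    # Cache the profile under each of its API keys, as well as under its
    # (email, realm) key below, so that lookups by either can be served
    # from the cache.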
    for api_key in get_all_api_keys(user_profile):
        items_for_remote_cache[user_profile_by_api_key_cache_key(api_key)] = (user_profile,)
    items_for_remote_cache[user_profile_cache_key(user_profile.email, user_profile.realm)] = (
        user_profile,
    )
    # We have other user_profile caches, but none of them are on the
    # core serving path for lots of requests.


def stream_cache_items(items_for_remote_cache: Dict[str, Tuple[Stream]], stream: Stream) -> None:
    items_for_remote_cache[get_stream_cache_key(stream.name, stream.realm_id)] = (stream,)


def client_cache_items(items_for_remote_cache: Dict[str, Tuple[Client]], client: Client) -> None:
    items_for_remote_cache[get_client_cache_key(client.name)] = (client,)


def huddle_cache_items(items_for_remote_cache: Dict[str, Tuple[Huddle]], huddle: Huddle) -> None:
    items_for_remote_cache[huddle_hash_cache_key(huddle.huddle_hash)] = (huddle,)


def session_cache_items(items_for_remote_cache: Dict[str, str], session: Session) -> None:
    if settings.SESSION_ENGINE != "django.contrib.sessions.backends.cached_db":
        # If we're not using the cached_db session engine, there will be
        # no store.cache_key attribute, and in any case we don't need to
        # fill the cache, since it won't exist.
        return
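    # Recreate the session store for this session key so we can compute its
    # cache key and decode the stored session data.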
    store = session_engine.SessionStore(session_key=session.session_key)
    items_for_remote_cache[store.cache_key] = store.decode(session.session_data)


def get_active_realm_ids() -> List[int]:
    """For installations like Zulip Cloud hosting a lot of realms, it only makes
    sense to do cache-filling work for realms that have any currently
    active users/clients. Otherwise, we end up with every single-user
    trial organization that has ever been created costing us N streams
    worth of cache work (where N is the number of default streams for
    a new organization).
    """
    date = timezone_now() - datetime.timedelta(days=2)
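    # "Active" means the realm had at least one 1-day-active user within the
    # last two days, per the "1day_actives::day" RealmCount statistic.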
    return (
        RealmCount.objects.filter(end_time__gte=date, property="1day_actives::day", value__gt=0)
        .distinct("realm_id")
        .values_list("realm_id", flat=True)
    )


def get_streams() -> List[Stream]:
    return (
        Stream.objects.select_related()
        .filter(realm__in=get_active_realm_ids())
        .exclude(
            # We filter out Zephyr realms, because they can easily
            # have 10,000s of streams with only 1 subscriber.
            is_in_zephyr_realm=True
        )
    )


def get_users() -> List[UserProfile]:
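    # Long-term idle users are skipped; pre-filling cache entries for users
    # who are not actively using the product is rarely worth the work.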
    return UserProfile.objects.select_related().filter(
        long_term_idle=False, realm__in=get_active_realm_ids()
    )


# Format is (objects query, items filler function, timeout, batch size)
#
# The objects queries are put inside lambdas to prevent Django from
# doing any setup for things we're unlikely to use (without the lambda
# wrapper the below adds an extra 3ms or so to startup time for
# anything importing this file).
cache_fillers: Dict[
    str, Tuple[Callable[[], Iterable[Any]], Callable[[Dict[str, Any], Any], None], int, int]
] = {
    "user": (get_users, user_cache_items, 3600 * 24 * 7, 10000),
    "client": (
        lambda: Client.objects.select_related().all(),
        client_cache_items,
        3600 * 24 * 7,
        10000,
    ),
    "stream": (get_streams, stream_cache_items, 3600 * 24 * 7, 10000),
    "huddle": (
        lambda: Huddle.objects.select_related().all(),
        huddle_cache_items,
        3600 * 24 * 7,
        10000,
    ),
    "session": (lambda: Session.objects.all(), session_cache_items, 3600 * 24 * 7, 10000),
}
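
# A sketch of how these fillers are consumed (in Zulip this is normally driven
# by the `fill_memcached_caches` management command; the loop below is purely
# illustrative):
#
#     for cache_name in cache_fillers:
#         fill_remote_cache(cache_name)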


def fill_remote_cache(cache: str) -> None:
    remote_cache_time_start = get_remote_cache_time()
    remote_cache_requests_start = get_remote_cache_requests()
    items_for_remote_cache: Dict[str, Any] = {}
    (objects, items_filler, timeout, batch_size) = cache_fillers[cache]
    count = 0
    for obj in objects():
        items_filler(items_for_remote_cache, obj)
        count += 1
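        # Flush the current batch and start a fresh dict so the number of
        # objects accumulated in memory stays bounded. These intermediate
        # writes use a shorter 1-day timeout; the final write below uses the
        # full 7-day timeout.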
        if count % batch_size == 0:
            cache_set_many(items_for_remote_cache, timeout=3600 * 24)
            items_for_remote_cache = {}
    cache_set_many(items_for_remote_cache, timeout=3600 * 24 * 7)
    logging.info(
        "Successfully populated %s cache! Consumed %s remote cache queries (%s time)",
        cache,
        get_remote_cache_requests() - remote_cache_requests_start,
        round(get_remote_cache_time() - remote_cache_time_start, 2),
    )