mirror of https://github.com/zulip/zulip.git

Annotate zerver/lib/cache_helpers.py

parent 8c2382deeb
commit 2213a9f41f
@@ -1,5 +1,5 @@
 from __future__ import absolute_import
-from typing import Any
+from typing import Any, Dict, Callable, Tuple
 
 # This file needs to be different from cache.py because cache.py
 # cannot import anything from zerver.models or we'd have an import
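The hunks below add PEP 484 type comments rather than Python 3 annotation syntax, since this file still has to run under Python 2 (hence the from __future__ import). A minimal, self-contained sketch of the same convention, using illustrative names that are not from Zulip:

from typing import Dict, Tuple

def cache_items(items, key, value):
    # type: (Dict[str, Tuple[str]], str, str) -> None
    # mypy reads the comment on the first line of the body as the signature.
    items[key] = (value,)

# Variables take a trailing type comment instead of an annotation.
items = {}  # type: Dict[str, Tuple[str]]
cache_items(items, "greeting", "hello")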
@@ -19,13 +19,16 @@ from django.db.models import Q
 MESSAGE_CACHE_SIZE = 75000
 
 def cache_save_message(message):
+    # type: (Message) -> None
     cache_set(message_cache_key(message.id), message, timeout=3600*24)
 
 @cache_with_key(message_cache_key, timeout=3600*24)
 def cache_get_message(message_id):
+    # type: (int) -> Message
     return Message.objects.select_related().get(id=message_id)
 
 def message_fetch_objects():
+    # type: () -> List[Any]
     try:
         max_id = Message.objects.only('id').order_by("-id")[0].id
     except IndexError:
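cache_get_message is wrapped in @cache_with_key, which builds the cache key from the call arguments and only runs the function body on a miss. The real decorator is imported from Zulip's cache module and does not appear in this diff; the following is a rough sketch of that pattern under that assumption, backed by a plain dict instead of the remote cache:

from typing import Any, Callable, Dict

_cache = {}  # type: Dict[str, Any]  # stand-in for the remote cache

def cache_with_key(keyfunc, timeout=None):
    # type: (Callable[..., str], Any) -> Callable[..., Any]
    def decorator(func):
        def wrapped(*args, **kwargs):
            key = keyfunc(*args, **kwargs)
            if key in _cache:          # hit: skip the database query
                return _cache[key]
            value = func(*args, **kwargs)
            _cache[key] = value        # timeout handling is omitted in this sketch
            return value
        return wrapped
    return decorator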
@@ -34,26 +37,33 @@ def message_fetch_objects():
             id__gt=max_id - MESSAGE_CACHE_SIZE)
 
 def message_cache_items(items_for_remote_cache, message):
+    # type: (Dict[str, Tuple[Message]], Message) -> None
     items_for_remote_cache[message_cache_key(message.id)] = (message,)
 
 def user_cache_items(items_for_remote_cache, user_profile):
+    # type: (Dict[str, Tuple[UserProfile]], UserProfile) -> None
     items_for_remote_cache[user_profile_by_email_cache_key(user_profile.email)] = (user_profile,)
     items_for_remote_cache[user_profile_by_id_cache_key(user_profile.id)] = (user_profile,)
 
 def stream_cache_items(items_for_remote_cache, stream):
+    # type: (Dict[str, Tuple[Stream]], Stream) -> None
     items_for_remote_cache[get_stream_cache_key(stream.name, stream.realm_id)] = (stream,)
 
 def client_cache_items(items_for_remote_cache, client):
+    # type: (Dict[str, Tuple[Client]], Client) -> None
     items_for_remote_cache[get_client_cache_key(client.name)] = (client,)
 
 def huddle_cache_items(items_for_remote_cache, huddle):
+    # type: (Dict[str, Tuple[Huddle]], Huddle) -> None
     items_for_remote_cache[huddle_hash_cache_key(huddle.huddle_hash)] = (huddle,)
 
 def recipient_cache_items(items_for_remote_cache, recipient):
+    # type: (Dict[str, Tuple[Recipient]], Recipient) -> None
     items_for_remote_cache[get_recipient_cache_key(recipient.type, recipient.type_id)] = (recipient,)
 
 session_engine = import_module(settings.SESSION_ENGINE)
 def session_cache_items(items_for_remote_cache, session):
+    # type: (Dict[str, str], Session) -> None
     store = session_engine.SessionStore(session_key=session.session_key)
     items_for_remote_cache[store.cache_key] = store.decode(session.session_data)
 
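Each of the *_cache_items helpers above has the same shape: it takes the shared items_for_remote_cache dict plus a single model instance, and stores a one-element tuple under that object's cache key. A toy version of the pattern using plain strings, with a made-up key function rather than the real ones from zerver.models:

from typing import Dict, Tuple

def greeting_cache_key(name):
    # type: (str) -> str
    return "greeting:%s" % (name,)   # illustrative key function, not Zulip's

def greeting_cache_items(items_for_remote_cache, name):
    # type: (Dict[str, Tuple[str]], str) -> None
    items_for_remote_cache[greeting_cache_key(name)] = (name,)

items = {}  # type: Dict[str, Tuple[str]]
greeting_cache_items(items, "iago")
assert items == {"greeting:iago": ("iago",)}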
@@ -71,9 +81,10 @@ cache_fillers = {
     'message': (message_fetch_objects, message_cache_items, 3600 * 24, 1000),
     'huddle': (lambda: Huddle.objects.select_related().all(), huddle_cache_items, 3600*24*7, 10000),
     'session': (lambda: Session.objects.all(), session_cache_items, 3600*24*7, 10000),
-}
+} # type: Dict[str, Tuple[Callable[[], List[Any]], Callable[[Dict[str, Any], Any], None], int, int]]
 
 def fill_remote_cache(cache):
+    # type: (str) -> None
     remote_cache_time_start = get_remote_cache_time()
     remote_cache_requests_start = get_remote_cache_requests()
     items_for_remote_cache = {} # type: Dict[str, Any]
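The trailing type comment on the closing brace annotates the whole cache_fillers dict: each entry maps a cache name to a (fetcher, per-item filler, timeout, batch size) tuple. The body of fill_remote_cache is truncated by this hunk, so the loop below is only an assumption about how such a table would typically be drained, with a hypothetical bulk setter standing in for whatever the real code calls:

from typing import Any, Callable, Dict, List, Tuple

def bulk_cache_set(items, timeout):
    # type: (Dict[str, Any], int) -> None
    # Hypothetical stand-in for a bulk remote-cache write, not Zulip's API.
    print("setting %d keys (timeout=%ds)" % (len(items), timeout))

def fill_cache_sketch(cache_fillers, cache):
    # type: (Dict[str, Tuple[Callable[[], List[Any]], Callable[[Dict[str, Any], Any], None], int, int]], str) -> None
    fetcher, filler, timeout, batch_size = cache_fillers[cache]
    items_for_remote_cache = {}  # type: Dict[str, Any]
    for obj in fetcher():
        filler(items_for_remote_cache, obj)
        if len(items_for_remote_cache) >= batch_size:   # flush in batches
            bulk_cache_set(items_for_remote_cache, timeout)
            items_for_remote_cache = {}
    if items_for_remote_cache:                          # flush the remainder
        bulk_cache_set(items_for_remote_cache, timeout)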