message_cache: Rename "to_dict" functions which deal with bytes.

Alex Vandiver 2023-10-12 16:47:19 +00:00 committed by Tim Abbott
parent b3e8878252
commit 6597c560cb
3 changed files with 14 additions and 14 deletions


@@ -73,8 +73,8 @@ def stringify_message_dict(message_dict: Dict[str, Any]) -> bytes:
 @cache_with_key(to_dict_cache_key, timeout=3600 * 24)
-def message_to_dict_json(message: Message, realm_id: Optional[int] = None) -> bytes:
-    return MessageDict.to_dict_uncached([message], realm_id)[message.id]
+def message_to_encoded_cache(message: Message, realm_id: Optional[int] = None) -> bytes:
+    return MessageDict.messages_to_encoded_cache([message], realm_id)[message.id]
 def update_message_cache(
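
As the decorator above shows, the renamed message_to_encoded_cache is still memoized under to_dict_cache_key with a 24-hour timeout. For readers unfamiliar with that pattern, here is a minimal, self-contained sketch of a key-based byte cache; the decorator name, key format, and in-memory dict backend are illustrative assumptions, not Zulip's cache_with_key implementation.

    # Minimal sketch of key-based memoization of encoded bytes. The in-memory
    # dict stands in for the remote cache; the key format is hypothetical.
    from typing import Callable, Dict, Optional

    _cache: Dict[str, bytes] = {}

    def cache_encoded_with_key(key_func: Callable[..., str]) -> Callable:
        def decorate(func: Callable[..., bytes]) -> Callable[..., bytes]:
            def wrapper(*args: object, **kwargs: object) -> bytes:
                key = key_func(*args, **kwargs)
                if key not in _cache:
                    _cache[key] = func(*args, **kwargs)
                return _cache[key]
            return wrapper
        return decorate

    @cache_encoded_with_key(lambda message_id, realm_id=None: f"message_dict:{message_id}")
    def message_to_encoded_cache(message_id: int, realm_id: Optional[int] = None) -> bytes:
        # Stand-in for MessageDict.messages_to_encoded_cache([message], realm_id)[message.id]
        return f'{{"id": {message_id}, "realm_id": {realm_id}}}'.encode()

    assert message_to_encoded_cache(42) == message_to_encoded_cache(42)  # second call hits the cache
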
@@ -84,7 +84,7 @@ def update_message_cache(
     messages)."""
     items_for_remote_cache = {}
     message_ids = []
-    changed_messages_to_dict = MessageDict.to_dict_uncached(changed_messages, realm_id)
+    changed_messages_to_dict = MessageDict.messages_to_encoded_cache(changed_messages, realm_id)
     for msg_id, msg in changed_messages_to_dict.items():
         message_ids.append(msg_id)
         key = to_dict_cache_key_id(msg_id)
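
Since the bulk update_message_cache path also changes here, a hedged sketch of its overall shape: every re-encoded message is collected under its per-message cache key and written back in one batch. The dict-backed cache, key format, and cache_set_many-style helper are illustrative stand-ins, not Zulip's actual cache API.

    # Sketch of the batched cache write: collect {cache key: encoded bytes}
    # for all changed messages, then set them in a single call.
    from typing import Dict, List

    remote_cache: Dict[str, bytes] = {}

    def to_dict_cache_key_id(message_id: int) -> str:
        return f"message_dict:{message_id}"  # hypothetical key format

    def cache_set_many(items: Dict[str, bytes]) -> None:
        remote_cache.update(items)  # stand-in for one round trip to the remote cache

    def update_message_cache(changed_messages_to_dict: Dict[int, bytes]) -> List[int]:
        items_for_remote_cache: Dict[str, bytes] = {}
        message_ids: List[int] = []
        for msg_id, encoded in changed_messages_to_dict.items():
            message_ids.append(msg_id)
            items_for_remote_cache[to_dict_cache_key_id(msg_id)] = encoded
        cache_set_many(items_for_remote_cache)
        return message_ids

    assert update_message_cache({1: b"{}", 2: b"{}"}) == [1, 2]
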
@@ -157,8 +157,8 @@ class MessageDict:
         to our message object, with the side effect of
         populating the cache.
         """
-        json = message_to_dict_json(message, realm_id)
-        obj = extract_message_dict(json)
+        encoded_object_bytes = message_to_encoded_cache(message, realm_id)
+        obj = extract_message_dict(encoded_object_bytes)
         """
         The steps below are similar to what we do in
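
This hunk covers the decode side: the bytes produced by the renamed function come back through extract_message_dict before post-processing. A minimal standard-library sketch of such an encode/decode round trip follows; the json-plus-zlib serialization is an assumption for illustration, not necessarily what stringify_message_dict and extract_message_dict actually use.

    # Round-trip sketch: dict -> bytes (the cache value) -> dict.
    import json
    import zlib
    from typing import Any, Dict

    def stringify_message_dict(message_dict: Dict[str, Any]) -> bytes:
        return zlib.compress(json.dumps(message_dict).encode())

    def extract_message_dict(encoded_object_bytes: bytes) -> Dict[str, Any]:
        return json.loads(zlib.decompress(encoded_object_bytes).decode())

    obj = extract_message_dict(stringify_message_dict({"id": 7, "content": "hi"}))
    assert obj == {"id": 7, "content": "hi"}
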
@@ -272,15 +272,15 @@ class MessageDict:
         return sew_messages_and_reactions(messages, reactions)
     @staticmethod
-    def to_dict_uncached(
+    def messages_to_encoded_cache(
         messages: Collection[Message], realm_id: Optional[int] = None
     ) -> Dict[int, bytes]:
-        messages_dict = MessageDict.to_dict_uncached_helper(messages, realm_id)
+        messages_dict = MessageDict.messages_to_encoded_cache_helper(messages, realm_id)
         encoded_messages = {msg["id"]: stringify_message_dict(msg) for msg in messages_dict}
         return encoded_messages
     @staticmethod
-    def to_dict_uncached_helper(
+    def messages_to_encoded_cache_helper(
         messages: Collection[Message], realm_id: Optional[int] = None
     ) -> List[Dict[str, Any]]:
         # Near duplicate of the build_message_dict + get_raw_db_rows
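
The rename makes the two-stage split in this last hunk easier to read: the helper builds plain message dicts, and the outer messages_to_encoded_cache encodes each one into the bytes that actually go into the cache. A self-contained sketch of that shape; the fake rows and field names are assumptions.

    # Sketch of the helper/outer split: List[Dict] from the "database",
    # then {message id: encoded bytes} for the cache.
    import json
    from typing import Any, Dict, Iterable, List, Optional

    FAKE_ROWS = [{"id": 1, "content": "hello"}, {"id": 2, "content": "world"}]

    def messages_to_encoded_cache_helper(
        message_ids: Iterable[int], realm_id: Optional[int] = None
    ) -> List[Dict[str, Any]]:
        # Stand-in for the build_message_dict + get_raw_db_rows work.
        wanted = set(message_ids)
        return [dict(row, realm_id=realm_id) for row in FAKE_ROWS if row["id"] in wanted]

    def messages_to_encoded_cache(
        message_ids: Iterable[int], realm_id: Optional[int] = None
    ) -> Dict[int, bytes]:
        messages_dict = messages_to_encoded_cache_helper(message_ids, realm_id)
        return {msg["id"]: json.dumps(msg).encode() for msg in messages_dict}

    encoded = messages_to_encoded_cache([1, 2], realm_id=10)
    assert set(encoded) == {1, 2} and isinstance(encoded[1], bytes)
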


@@ -87,7 +87,7 @@ class MessageDictTest(ZulipTestCase):
             msg_id: int, apply_markdown: bool, client_gravatar: bool
         ) -> Dict[str, Any]:
             msg = reload_message(msg_id)
-            unhydrated_dict = MessageDict.to_dict_uncached_helper([msg])[0]
+            unhydrated_dict = MessageDict.messages_to_encoded_cache_helper([msg])[0]
             # The next step mutates the dict in place
             # for performance reasons.
             MessageDict.post_process_dicts(
@@ -265,7 +265,7 @@ class MessageDictTest(ZulipTestCase):
             return Message.objects.get(id=msg_id)
         def assert_topic_links(links: List[Dict[str, str]], msg: Message) -> None:
-            dct = MessageDict.to_dict_uncached_helper([msg])[0]
+            dct = MessageDict.messages_to_encoded_cache_helper([msg])[0]
             self.assertEqual(dct[TOPIC_LINKS], links)
         # Send messages before and after saving the realm filter from each user.
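
Both test hunks above lean on the fact that messages_to_encoded_cache_helper returns "unhydrated" dicts that post_process_dicts then mutates in place for performance. A small sketch of that mutate-in-place contract; the field names and hydration rule here are hypothetical.

    # Sketch of in-place post-processing: the caller's dicts are hydrated
    # without copying, so its existing references see the final form.
    from typing import Any, Dict, List

    def post_process_dicts(objs: List[Dict[str, Any]], apply_markdown: bool) -> None:
        for obj in objs:
            rendered = obj.pop("rendered_content", "")
            plain = obj.pop("raw_content", "")
            obj["content"] = rendered if apply_markdown else plain

    unhydrated = [{"id": 1, "rendered_content": "<p>hi</p>", "raw_content": "hi"}]
    post_process_dicts(unhydrated, apply_markdown=True)
    assert unhydrated[0] == {"id": 1, "content": "<p>hi</p>"}  # same dict object, mutated
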


@@ -259,8 +259,8 @@ class EditMessagePayloadTest(EditMessageTestCase):
 class EditMessageTest(EditMessageTestCase):
-    def test_query_count_on_to_dict_uncached(self) -> None:
-        # `to_dict_uncached` method is used by the mechanisms
+    def test_query_count_on_messages_to_encoded_cache(self) -> None:
+        # `messages_to_encoded_cache` method is used by the mechanisms
         # tested in this class. Hence, its performance is tested here.
         # Generate 2 messages
         user = self.example_user("hamlet")
@@ -288,12 +288,12 @@ class EditMessageTest(EditMessageTestCase):
         # 1 query for linkifiers
         # 1 query for display recipients
         with self.assert_database_query_count(7):
-            MessageDict.to_dict_uncached(messages)
+            MessageDict.messages_to_encoded_cache(messages)
         realm_id = 2  # Fetched from stream object
         # Check number of queries performed with realm_id
         with self.assert_database_query_count(3):
-            MessageDict.to_dict_uncached(messages, realm_id)
+            MessageDict.messages_to_encoded_cache(messages, realm_id)
     def test_save_message(self) -> None:
         """This is also tested by a client test, but here we can verify