from typing import Any, Dict, List, Union
from unittest import mock

from django.utils.timezone import now as timezone_now

from zerver.lib.cache import cache_delete, to_dict_cache_key_id
from zerver.lib.markdown import version as markdown_version
from zerver.lib.message import MessageDict, messages_for_ids, sew_messages_and_reactions
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import make_client
from zerver.lib.topic import TOPIC_LINKS
from zerver.lib.types import DisplayRecipientT, UserDisplayRecipient
from zerver.models import (
    Message,
    Reaction,
    Realm,
    RealmFilter,
    Recipient,
    Stream,
    UserProfile,
    flush_per_request_caches,
    get_display_recipient,
    get_realm,
    get_stream,
)

class MessageDictTest(ZulipTestCase):
    def test_both_codepaths(self) -> None:
        """
        We have two different codepaths that
        extract a particular shape of dictionary
        for messages to send to clients:

            events:

                These are the events we send to MANY
                clients when a message is originally
                sent.

            fetch:

                These are the messages we send to ONE
                client when they fetch messages via
                some narrow/search in the UI.

        Different clients have different needs
        when it comes to things like generating avatar
        hashes or including both rendered and unrendered
        Markdown, so that explains the different shapes.

        And then the two codepaths have different
        performance needs. In the events codepath, we
        have the Django view generate a single "wide"
        dictionary that gets put on the event queue,
        and then we send events to multiple clients,
        finalizing the payload for each of them depending
        on the "shape" they want. (We also avoid
        doing extra work for any two clients who want
        the same shape dictionary, but that's out of the
        scope of this particular test.)

        In the fetch scenario, the single client only needs
        a dictionary of one shape, but we need to re-hydrate
        the sender information, since the sender details
        may have changed since the message was originally
        sent.

        This test simply verifies that the two codepaths
        ultimately provide the same result.
        """

        def reload_message(msg_id: int) -> Message:
            # Get a clean copy of the message, and
            # clear the cache.
            cache_delete(to_dict_cache_key_id(msg_id))
            msg = Message.objects.get(id=msg_id)
            return msg
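        # The "events" codepath, in miniature: build the single wide
        # dict once, then finalize it for a client with the given flags.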
        def get_send_message_payload(
            msg_id: int, apply_markdown: bool, client_gravatar: bool
        ) -> Dict[str, Any]:
            msg = reload_message(msg_id)
            wide_dict = MessageDict.wide_dict(msg)

            narrow_dict = MessageDict.finalize_payload(
                wide_dict,
                apply_markdown=apply_markdown,
                client_gravatar=client_gravatar,
            )
            return narrow_dict
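        # The "fetch" codepath, in miniature: build an unhydrated dict
        # from the database row, then hydrate/finalize it in place.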
        def get_fetch_payload(
            msg_id: int, apply_markdown: bool, client_gravatar: bool
        ) -> Dict[str, Any]:
            msg = reload_message(msg_id)
            unhydrated_dict = MessageDict.to_dict_uncached_helper([msg])[0]
            # The next step mutates the dict in place
            # for performance reasons.
            MessageDict.post_process_dicts(
                [unhydrated_dict],
                apply_markdown=apply_markdown,
                client_gravatar=client_gravatar,
            )
            final_dict = unhydrated_dict
            return final_dict

        def test_message_id() -> int:
            hamlet = self.example_user("hamlet")
            self.login_user(hamlet)
            msg_id = self.send_stream_message(
                hamlet,
"Denmark",
|
2020-07-06 01:55:16 +02:00
|
|
|
topic_name="editing",
|
|
|
|
content="before edit",
|
|
|
|
)
|
|
|
|
return msg_id
|
|
|
|
|
|
|
|
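        # Every combination of (apply_markdown, client_gravatar) that a
        # client might request.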
        flag_setups = [
            [False, False],
            [False, True],
            [True, False],
            [True, True],
        ]

        msg_id = test_message_id()

        for (apply_markdown, client_gravatar) in flag_setups:
            send_message_payload = get_send_message_payload(
                msg_id,
                apply_markdown=apply_markdown,
                client_gravatar=client_gravatar,
            )

            fetch_payload = get_fetch_payload(
                msg_id,
                apply_markdown=apply_markdown,
                client_gravatar=client_gravatar,
            )

            self.assertEqual(send_message_payload, fetch_payload)

    def test_bulk_message_fetching(self) -> None:
        sender = self.example_user("othello")
        receiver = self.example_user("hamlet")
        realm = get_realm("zulip")
        pm_recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        stream_name = "Çiğdem"
        stream = self.make_stream(stream_name)
        stream_recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
        sending_client = make_client(name="test suite")

        ids = []
        for i in range(300):
            for recipient in [pm_recipient, stream_recipient]:
                message = Message(
                    sender=sender,
                    recipient=recipient,
                    realm=realm,
                    content=f"whatever {i}",
                    rendered_content="DOES NOT MATTER",
                    rendered_content_version=markdown_version,
                    date_sent=timezone_now(),
                    sending_client=sending_client,
                    last_edit_time=timezone_now(),
                    edit_history="[]",
                )
                message.set_topic_name("whatever")
                message.save()
                ids.append(message.id)

                Reaction.objects.create(
                    user_profile=sender, message=message, emoji_name="simple_smile"
                )

        num_ids = len(ids)
        self.assertTrue(num_ids >= 600)

        flush_per_request_caches()
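        # Fetching raw rows for 600+ messages should take a fixed,
        # small number of queries, not one query per message.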
        with self.assert_database_query_count(7):
            rows = list(MessageDict.get_raw_db_rows(ids))

        objs = [MessageDict.build_dict_from_raw_db_row(row) for row in rows]
        MessageDict.post_process_dicts(objs, apply_markdown=False, client_gravatar=False)

        self.assert_length(rows, num_ids)

    def test_applying_markdown(self) -> None:
        sender = self.example_user("othello")
        receiver = self.example_user("hamlet")
        recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        sending_client = make_client(name="test suite")
        message = Message(
            sender=sender,
            recipient=recipient,
            realm=receiver.realm,
            content="hello **world**",
            date_sent=timezone_now(),
            sending_client=sending_client,
            last_edit_time=timezone_now(),
            edit_history="[]",
        )
        message.set_topic_name("whatever")
        message.save()

        # An important part of this test is to get the message through this exact code path,
        # because there is an ugly hack we need to cover. So don't just say "row = message".
        row = MessageDict.get_raw_db_rows([message.id])[0]
        dct = MessageDict.build_dict_from_raw_db_row(row)
        expected_content = "<p>hello <strong>world</strong></p>"
        self.assertEqual(dct["rendered_content"], expected_content)
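        # The rendered content (and its version) should now also be
        # persisted on the Message row itself.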
        message = Message.objects.get(id=message.id)
        self.assertEqual(message.rendered_content, expected_content)
        self.assertEqual(message.rendered_content_version, markdown_version)

    @mock.patch("zerver.lib.message.markdown_convert")
    def test_applying_markdown_invalid_format(self, convert_mock: Any) -> None:
        # pretend the converter returned an invalid message without raising an exception
        convert_mock.return_value = None
        sender = self.example_user("othello")
        receiver = self.example_user("hamlet")
        recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        sending_client = make_client(name="test suite")
        message = Message(
            sender=sender,
            recipient=recipient,
            realm=receiver.realm,
            content="hello **world**",
            date_sent=timezone_now(),
            sending_client=sending_client,
            last_edit_time=timezone_now(),
            edit_history="[]",
        )
        message.set_topic_name("whatever")
        message.save()

        # An important part of this test is to get the message through this exact code path,
        # because there is an ugly hack we need to cover. So don't just say "row = message".
        row = MessageDict.get_raw_db_rows([message.id])[0]
        dct = MessageDict.build_dict_from_raw_db_row(row)
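        # When the converter produces an invalid result, the dict falls
        # back to a canned error message instead of crashing.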
        error_content = (
            "<p>[Zulip note: Sorry, we could not understand the formatting of your message]</p>"
        )
        self.assertEqual(dct["rendered_content"], error_content)

    def test_topic_links_use_stream_realm(self) -> None:
        # Set up a realm filter on 'zulip' and assert that messages
        # sent to a stream on 'zulip' have the topic linkified,
        # and not linkified when sent to a stream in 'lear'.
        zulip_realm = get_realm("zulip")
        lear_realm = get_realm("lear")
        url_format_string = r"https://trac.example.com/ticket/%(id)s"
        links = {"url": "https://trac.example.com/ticket/123", "text": "#123"}
        topic_name = "test #123"

        linkifier = RealmFilter(
            realm=zulip_realm, pattern=r"#(?P<id>[0-9]{2,8})", url_format_string=url_format_string
        )
        self.assertEqual(
            str(linkifier),
            "<RealmFilter(zulip): #(?P<id>[0-9]{2,8}) https://trac.example.com/ticket/%(id)s>",
        )

        def get_message(sender: UserProfile, realm: Realm) -> Message:
            stream_name = "Denmark"
            if not Stream.objects.filter(realm=realm, name=stream_name).exists():
                self.make_stream(stream_name, realm)
            self.subscribe(sender, stream_name)
            msg_id = self.send_stream_message(sender, "Denmark", "hello world", topic_name, realm)
            return Message.objects.get(id=msg_id)

        def assert_topic_links(links: List[Dict[str, str]], msg: Message) -> None:
            dct = MessageDict.to_dict_uncached_helper([msg])[0]
            self.assertEqual(dct[TOPIC_LINKS], links)

        # Send messages before and after saving the realm filter from each user.
        assert_topic_links([], get_message(self.example_user("othello"), zulip_realm))
        assert_topic_links([], get_message(self.lear_user("cordelia"), lear_realm))
        assert_topic_links([], get_message(self.notification_bot(zulip_realm), zulip_realm))
        linkifier.save()
        assert_topic_links([links], get_message(self.example_user("othello"), zulip_realm))
        assert_topic_links([], get_message(self.lear_user("cordelia"), lear_realm))
        assert_topic_links([links], get_message(self.notification_bot(zulip_realm), zulip_realm))

    def test_reaction(self) -> None:
        sender = self.example_user("othello")
        receiver = self.example_user("hamlet")
        recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        sending_client = make_client(name="test suite")
        message = Message(
            sender=sender,
            recipient=recipient,
            realm=receiver.realm,
            content="hello **world**",
            date_sent=timezone_now(),
            sending_client=sending_client,
            last_edit_time=timezone_now(),
            edit_history="[]",
        )
        message.set_topic_name("whatever")
        message.save()

        reaction = Reaction.objects.create(
            message=message, user_profile=sender, emoji_name="simple_smile"
        )
        row = MessageDict.get_raw_db_rows([message.id])[0]
        msg_dict = MessageDict.build_dict_from_raw_db_row(row)
        self.assertEqual(msg_dict["reactions"][0]["emoji_name"], reaction.emoji_name)
        self.assertEqual(msg_dict["reactions"][0]["user_id"], sender.id)
        self.assertEqual(msg_dict["reactions"][0]["user"]["id"], sender.id)
        self.assertEqual(msg_dict["reactions"][0]["user"]["email"], sender.email)
        self.assertEqual(msg_dict["reactions"][0]["user"]["full_name"], sender.full_name)

    def test_missing_anchor(self) -> None:
        self.login("hamlet")
        result = self.client_get(
            "/json/messages",
            {"use_first_unread_anchor": "false", "num_before": "1", "num_after": "1"},
        )

        self.assert_json_error(result, "Missing 'anchor' argument.")

    def test_invalid_anchor(self) -> None:
        self.login("hamlet")
        result = self.client_get(
            "/json/messages",
            {
                "use_first_unread_anchor": "false",
                "num_before": "1",
                "num_after": "1",
                "anchor": "chocolate",
            },
        )

        self.assert_json_error(result, "Invalid anchor")


class MessageHydrationTest(ZulipTestCase):
    def test_hydrate_stream_recipient_info(self) -> None:
        realm = get_realm("zulip")
        cordelia = self.example_user("cordelia")

        stream_id = get_stream("Verona", realm).id

        obj = dict(
            recipient_type=Recipient.STREAM,
            recipient_type_id=stream_id,
            sender_is_mirror_dummy=False,
            sender_email=cordelia.email,
            sender_full_name=cordelia.full_name,
            sender_id=cordelia.id,
        )
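        # hydrate_recipient_info fills in display_recipient and the
        # client-facing message type from the raw recipient fields.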
        MessageDict.hydrate_recipient_info(obj, "Verona")

        self.assertEqual(obj["display_recipient"], "Verona")
        self.assertEqual(obj["type"], "stream")

    def test_hydrate_pm_recipient_info(self) -> None:
        cordelia = self.example_user("cordelia")
        display_recipient: List[UserDisplayRecipient] = [
            dict(
                email="aaron@example.com",
                full_name="Aaron Smith",
                id=999,
                is_mirror_dummy=False,
            ),
        ]

        obj = dict(
            recipient_type=Recipient.PERSONAL,
            recipient_type_id=None,
            sender_is_mirror_dummy=False,
            sender_email=cordelia.email,
            sender_full_name=cordelia.full_name,
            sender_id=cordelia.id,
        )
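        # For private messages, hydration adds the sender to the
        # display_recipient list.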
        MessageDict.hydrate_recipient_info(obj, display_recipient)

        self.assertEqual(
            obj["display_recipient"],
            [
                dict(
                    email="aaron@example.com",
                    full_name="Aaron Smith",
                    id=999,
                    is_mirror_dummy=False,
                ),
                dict(
                    email=cordelia.email,
                    full_name=cordelia.full_name,
                    id=cordelia.id,
                    is_mirror_dummy=False,
                ),
            ],
        )
        self.assertEqual(obj["type"], "private")

    def test_messages_for_ids(self) -> None:
        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")

        stream_name = "test stream"
        self.subscribe(cordelia, stream_name)

        old_message_id = self.send_stream_message(cordelia, stream_name, content="foo")

        self.subscribe(hamlet, stream_name)

        content = "hello @**King Hamlet**"
        new_message_id = self.send_stream_message(cordelia, stream_name, content=content)

        user_message_flags = {
            old_message_id: ["read", "historical"],
            new_message_id: ["mentioned"],
        }

        messages = messages_for_ids(
            message_ids=[old_message_id, new_message_id],
            user_message_flags=user_message_flags,
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )

        self.assert_length(messages, 2)

        for message in messages:
            if message["id"] == old_message_id:
                old_message = message
            elif message["id"] == new_message_id:
                new_message = message

        self.assertEqual(old_message["content"], "<p>foo</p>")
        self.assertEqual(old_message["flags"], ["read", "historical"])

        self.assertIn('class="user-mention"', new_message["content"])
        self.assertEqual(new_message["flags"], ["mentioned"])
    def test_display_recipient_up_to_date(self) -> None:
        """
        This is a test for a bug where due to caching of message_dicts,
        after updating a user's information, fetching those cached messages
        via messages_for_ids would return message_dicts with display_recipient
        still having the old information. The returned message_dicts should have
        up-to-date display_recipients and we check for that here.
        """

        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        message_id = self.send_personal_message(hamlet, cordelia, "test")

        cordelia_recipient = cordelia.recipient
        # Cause the display_recipient to get cached:
        assert cordelia_recipient is not None
        get_display_recipient(cordelia_recipient)

        # Change cordelia's email:
        cordelia_new_email = "new-cordelia@zulip.com"
        cordelia.email = cordelia_new_email
        cordelia.save()

        # Local display_recipient cache needs to be flushed.
        # flush_per_request_caches() is called after every request,
        # so it makes sense to run it here.
        flush_per_request_caches()

        messages = messages_for_ids(
            message_ids=[message_id],
            user_message_flags={message_id: ["read"]},
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )
        message = messages[0]

        # Find which display_recipient in the list is cordelia:
        for display_recipient in message["display_recipient"]:
            if display_recipient["id"] == cordelia.id:
                cordelia_display_recipient = display_recipient

        # Make sure the email is up-to-date.
        self.assertEqual(cordelia_display_recipient["email"], cordelia_new_email)


class TestMessageForIdsDisplayRecipientFetching(ZulipTestCase):
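    # Stream recipients should hydrate to the stream name; user
    # recipients should hydrate to a list of user dicts.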
    def _verify_display_recipient(
        self,
        display_recipient: DisplayRecipientT,
        expected_recipient_objects: Union[Stream, List[UserProfile]],
    ) -> None:
        if isinstance(expected_recipient_objects, Stream):
            self.assertEqual(display_recipient, expected_recipient_objects.name)

        else:
            for user_profile in expected_recipient_objects:
                recipient_dict: UserDisplayRecipient = {
                    "email": user_profile.email,
                    "full_name": user_profile.full_name,
                    "id": user_profile.id,
                    "is_mirror_dummy": user_profile.is_mirror_dummy,
                }
                self.assertTrue(recipient_dict in display_recipient)

    def test_display_recipient_personal(self) -> None:
        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        othello = self.example_user("othello")
        message_ids = [
            self.send_personal_message(hamlet, cordelia, "test"),
            self.send_personal_message(cordelia, othello, "test"),
        ]

        messages = messages_for_ids(
            message_ids=message_ids,
            user_message_flags={message_id: ["read"] for message_id in message_ids},
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )

        self._verify_display_recipient(messages[0]["display_recipient"], [hamlet, cordelia])
        self._verify_display_recipient(messages[1]["display_recipient"], [cordelia, othello])

    def test_display_recipient_stream(self) -> None:
        cordelia = self.example_user("cordelia")
        self.subscribe(cordelia, "Denmark")

        message_ids = [
            self.send_stream_message(cordelia, "Verona", content="test"),
            self.send_stream_message(cordelia, "Denmark", content="test"),
        ]

        messages = messages_for_ids(
            message_ids=message_ids,
            user_message_flags={message_id: ["read"] for message_id in message_ids},
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )

        self._verify_display_recipient(
            messages[0]["display_recipient"], get_stream("Verona", cordelia.realm)
        )
        self._verify_display_recipient(
            messages[1]["display_recipient"], get_stream("Denmark", cordelia.realm)
        )

    def test_display_recipient_huddle(self) -> None:
        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        othello = self.example_user("othello")
        iago = self.example_user("iago")
        message_ids = [
            self.send_huddle_message(hamlet, [cordelia, othello], "test"),
            self.send_huddle_message(cordelia, [hamlet, othello, iago], "test"),
        ]

        messages = messages_for_ids(
            message_ids=message_ids,
            user_message_flags={message_id: ["read"] for message_id in message_ids},
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )

        self._verify_display_recipient(
            messages[0]["display_recipient"], [hamlet, cordelia, othello]
        )
        self._verify_display_recipient(
            messages[1]["display_recipient"], [hamlet, cordelia, othello, iago]
        )

    def test_display_recipient_various_types(self) -> None:
        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        othello = self.example_user("othello")
        iago = self.example_user("iago")
        self.subscribe(cordelia, "Denmark")
        self.subscribe(hamlet, "Scotland")

        message_ids = [
            self.send_huddle_message(hamlet, [cordelia, othello], "test"),
            self.send_stream_message(cordelia, "Verona", content="test"),
            self.send_personal_message(hamlet, cordelia, "test"),
            self.send_stream_message(cordelia, "Denmark", content="test"),
            self.send_huddle_message(cordelia, [hamlet, othello, iago], "test"),
            self.send_personal_message(cordelia, othello, "test"),
        ]

        messages = messages_for_ids(
            message_ids=message_ids,
            user_message_flags={message_id: ["read"] for message_id in message_ids},
            search_fields={},
            apply_markdown=True,
            client_gravatar=True,
            allow_edit_history=False,
        )

        self._verify_display_recipient(
            messages[0]["display_recipient"], [hamlet, cordelia, othello]
        )
        self._verify_display_recipient(
            messages[1]["display_recipient"], get_stream("Verona", hamlet.realm)
        )
        self._verify_display_recipient(messages[2]["display_recipient"], [hamlet, cordelia])
        self._verify_display_recipient(
            messages[3]["display_recipient"], get_stream("Denmark", hamlet.realm)
        )
        self._verify_display_recipient(
            messages[4]["display_recipient"], [hamlet, cordelia, othello, iago]
        )
        self._verify_display_recipient(messages[5]["display_recipient"], [cordelia, othello])


class SewMessageAndReactionTest(ZulipTestCase):
    def test_sew_messages_and_reaction(self) -> None:
        sender = self.example_user("othello")
        receiver = self.example_user("hamlet")
        realm = get_realm("zulip")
        pm_recipient = Recipient.objects.get(type_id=receiver.id, type=Recipient.PERSONAL)
        stream_name = "Çiğdem"
        stream = self.make_stream(stream_name)
        stream_recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
        sending_client = make_client(name="test suite")

        needed_ids = []
        for i in range(5):
            for recipient in [pm_recipient, stream_recipient]:
                message = Message(
                    sender=sender,
                    recipient=recipient,
                    realm=realm,
                    content=f"whatever {i}",
                    date_sent=timezone_now(),
                    sending_client=sending_client,
                    last_edit_time=timezone_now(),
                    edit_history="[]",
                )
                message.set_topic_name("whatever")
                message.save()
                needed_ids.append(message.id)
                reaction = Reaction(user_profile=sender, message=message, emoji_name="simple_smile")
                reaction.save()

        messages = Message.objects.filter(id__in=needed_ids).values(*["id", "content"])
        reactions = Reaction.get_raw_db_rows(needed_ids)
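        # sew_messages_and_reactions attaches each message's reactions
        # to its row under a "reactions" key.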
        tied_data = sew_messages_and_reactions(messages, reactions)
        for data in tied_data:
            self.assert_length(data["reactions"], 1)
            self.assertEqual(data["reactions"][0]["emoji_name"], "simple_smile")
            self.assertTrue(data["id"])
            self.assertTrue(data["content"])