2023-11-19 19:45:19 +01:00
|
|
|
from datetime import timedelta
|
2020-07-03 11:09:37 +02:00
|
|
|
from operator import itemgetter
|
|
|
|
from unittest import mock
|
|
|
|
|
2020-08-07 01:09:47 +02:00
|
|
|
import orjson
|
2021-05-26 21:20:11 +02:00
|
|
|
from django.utils.timezone import now as timezone_now
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2024-03-06 18:10:25 +01:00
|
|
|
from zerver.actions.message_edit import get_mentions_for_message_updates
|
2022-04-14 23:57:15 +02:00
|
|
|
from zerver.actions.realm_settings import do_change_realm_plan_type, do_set_realm_property
|
2024-03-06 18:10:25 +01:00
|
|
|
from zerver.actions.streams import do_deactivate_stream
|
2023-06-14 13:35:54 +02:00
|
|
|
from zerver.actions.user_groups import add_subgroups_to_user_group, check_add_user_group
|
2023-04-09 20:58:00 +02:00
|
|
|
from zerver.actions.user_topics import do_set_user_topic_visibility_policy
|
2024-03-06 18:10:25 +01:00
|
|
|
from zerver.lib.message import messages_for_ids
|
2023-10-03 03:25:57 +02:00
|
|
|
from zerver.lib.message_cache import MessageDict
|
2024-03-06 18:10:25 +01:00
|
|
|
from zerver.lib.test_classes import ZulipTestCase
|
2023-08-11 15:28:22 +02:00
|
|
|
from zerver.lib.test_helpers import queries_captured
|
2024-03-06 18:10:25 +01:00
|
|
|
from zerver.lib.topic import TOPIC_NAME
|
2022-05-31 01:34:34 +02:00
|
|
|
from zerver.lib.utils import assert_is_not_none
|
2024-07-28 03:21:33 +02:00
|
|
|
from zerver.models import Attachment, Message, NamedUserGroup, Realm, UserProfile, UserTopic
|
2023-12-15 01:55:59 +01:00
|
|
|
from zerver.models.groups import SystemGroups
|
2024-05-22 11:43:10 +02:00
|
|
|
from zerver.models.realms import EditTopicPolicyEnum, WildcardMentionPolicyEnum, get_realm
|
2023-12-15 03:57:04 +01:00
|
|
|
from zerver.models.streams import get_stream
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
|
2024-03-06 18:10:25 +01:00
|
|
|
class EditMessageTest(ZulipTestCase):
|
2021-02-12 08:19:30 +01:00
|
|
|
    def check_message(self, msg_id: int, topic_name: str, content: str) -> None:
        """Assert that the message was persisted correctly and that the
        cached dict form of the message agrees with the database row.

        Intended to be called right after an edit; see the inline note
        below about the cache expectations.
        """
        # Make sure we saved the message correctly to the DB.
        msg = Message.objects.select_related("realm").get(id=msg_id)
        self.assertEqual(msg.topic_name(), topic_name)
        self.assertEqual(msg.content, content)

        """
        We assume our caller just edited a message.

        Next, we will make sure we properly cached the messages. We still have
        to do a query to hydrate recipient info, but we won't need to hit the
        zerver_message table.
        """

        # keep_cache_warm=True: the cache is deliberately NOT flushed, so a
        # correctly cached message should need only a single hydration query.
        with queries_captured(keep_cache_warm=True) as queries:
            (fetch_message_dict,) = messages_for_ids(
                message_ids=[msg.id],
                user_message_flags={msg_id: []},
                search_fields={},
                apply_markdown=False,
                client_gravatar=False,
                allow_edit_history=True,
                user_profile=None,
                realm=msg.realm,
            )

        # Exactly one query, and it must not touch the zerver_message table.
        self.assert_length(queries, 1)
        for query in queries:
            self.assertNotIn("message", query.sql)

        # The cached dict must mirror the database row.
        self.assertEqual(
            fetch_message_dict[TOPIC_NAME],
            msg.topic_name(),
        )
        self.assertEqual(
            fetch_message_dict["content"],
            msg.content,
        )
        self.assertEqual(
            fetch_message_dict["sender_id"],
            msg.sender_id,
        )

        if msg.edit_history:
            # The fetched dict omits prev_rendered_content_version, so strip
            # it from the database copy before comparing.
            message_edit_history = orjson.loads(msg.edit_history)
            for item in message_edit_history:
                if "prev_rendered_content_version" in item:
                    del item["prev_rendered_content_version"]

            self.assertEqual(
                fetch_message_dict["edit_history"],
                message_edit_history,
            )
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-05-27 06:24:06 +02:00
|
|
|
def test_edit_message_no_changes(self) -> None:
|
|
|
|
self.login("hamlet")
|
|
|
|
msg_id = self.send_stream_message(
|
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
|
|
|
self.example_user("hamlet"), "Denmark", topic_name="editing", content="before edit"
|
2021-05-27 06:24:06 +02:00
|
|
|
)
|
|
|
|
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(msg_id),
|
2022-02-09 22:43:52 +01:00
|
|
|
{},
|
2021-05-27 06:24:06 +02:00
|
|
|
)
|
|
|
|
self.assert_json_error(result, "Nothing to change")
|
|
|
|
|
|
|
|
# Right now, we prevent users from editing widgets.
|
|
|
|
def test_edit_submessage(self) -> None:
|
|
|
|
self.login("hamlet")
|
|
|
|
msg_id = self.send_stream_message(
|
|
|
|
self.example_user("hamlet"),
|
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
|
|
|
"Denmark",
|
2021-05-27 06:24:06 +02:00
|
|
|
topic_name="editing",
|
|
|
|
content="/poll Games?\nYES\nNO",
|
|
|
|
)
|
|
|
|
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(msg_id),
|
|
|
|
{
|
|
|
|
"content": "/poll Games?\nYES\nNO\nMaybe",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
self.assert_json_error(result, "Widgets cannot be edited.")
|
|
|
|
|
2023-10-12 18:47:19 +02:00
|
|
|
def test_query_count_on_messages_to_encoded_cache(self) -> None:
|
|
|
|
# `messages_to_encoded_cache` method is used by the mechanisms
|
2020-07-03 11:09:37 +02:00
|
|
|
# tested in this class. Hence, its performance is tested here.
|
|
|
|
# Generate 2 messages
|
|
|
|
user = self.example_user("hamlet")
|
2021-03-08 11:39:48 +01:00
|
|
|
realm = user.realm
|
2020-07-03 11:09:37 +02:00
|
|
|
self.login_user(user)
|
|
|
|
stream_name = "public_stream"
|
|
|
|
self.subscribe(user, stream_name)
|
|
|
|
message_ids = []
|
2021-02-12 08:19:30 +01:00
|
|
|
message_ids.append(self.send_stream_message(user, stream_name, "Message one"))
|
2020-07-03 11:09:37 +02:00
|
|
|
user_2 = self.example_user("cordelia")
|
|
|
|
self.subscribe(user_2, stream_name)
|
2021-02-12 08:19:30 +01:00
|
|
|
message_ids.append(self.send_stream_message(user_2, stream_name, "Message two"))
|
2021-03-08 11:39:48 +01:00
|
|
|
self.subscribe(self.notification_bot(realm), stream_name)
|
2021-02-12 08:19:30 +01:00
|
|
|
message_ids.append(
|
2021-03-08 11:39:48 +01:00
|
|
|
self.send_stream_message(self.notification_bot(realm), stream_name, "Message three")
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
messages = [
|
2023-08-01 18:34:17 +02:00
|
|
|
Message.objects.select_related(*Message.DEFAULT_SELECT_RELATED).get(id=message_id)
|
|
|
|
for message_id in message_ids
|
2021-02-12 08:19:30 +01:00
|
|
|
]
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
# Check number of queries performed
|
|
|
|
# 1 query for realm_id per message = 3
|
|
|
|
# 1 query each for reactions & submessage for all messages = 2
|
2023-08-11 16:40:06 +02:00
|
|
|
# 1 query for linkifiers
|
|
|
|
# 1 query for display recipients
|
|
|
|
with self.assert_database_query_count(7):
|
2023-10-12 18:47:19 +02:00
|
|
|
MessageDict.messages_to_encoded_cache(messages)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
realm_id = 2 # Fetched from stream object
|
|
|
|
# Check number of queries performed with realm_id
|
2023-08-11 16:40:06 +02:00
|
|
|
with self.assert_database_query_count(3):
|
2023-10-12 18:47:19 +02:00
|
|
|
MessageDict.messages_to_encoded_cache(messages, realm_id)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
def test_save_message(self) -> None:
|
|
|
|
"""This is also tested by a client test, but here we can verify
|
|
|
|
the cache against the database"""
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2021-02-12 08:19:30 +01:00
|
|
|
msg_id = self.send_stream_message(
|
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
|
|
|
self.example_user("hamlet"), "Denmark", topic_name="editing", content="before edit"
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
result = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{msg_id}",
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"content": "after edit",
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_success(result)
|
|
|
|
self.check_message(msg_id, topic_name="editing", content="after edit")
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{msg_id}",
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"topic": "edited",
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_success(result)
|
2024-03-06 18:10:25 +01:00
|
|
|
self.assertEqual(Message.objects.get(id=msg_id).topic_name(), "edited")
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2022-03-02 05:33:20 +01:00
|
|
|
    def test_fetch_message_from_id(self) -> None:
        """Exercise GET /json/messages/<id> access control: own direct
        messages, web-public streams (including spectator access and the
        apply_markdown flag), and the error cases.
        """
        self.login("hamlet")
        msg_id = self.send_personal_message(
            from_user=self.example_user("hamlet"),
            to_user=self.example_user("cordelia"),
            content="Personal message",
        )
        result = self.client_get("/json/messages/" + str(msg_id))
        response_dict = self.assert_json_success(result)
        self.assertEqual(response_dict["raw_content"], "Personal message")
        self.assertEqual(response_dict["message"]["id"], msg_id)
        self.assertEqual(response_dict["message"]["flags"], ["read"])

        # Send message to web-public stream where hamlet is not subscribed.
        # This will test case of user having no `UserMessage` but having access
        # to message.
        web_public_stream = self.make_stream("web-public-stream", is_web_public=True)
        self.subscribe(self.example_user("cordelia"), web_public_stream.name)
        web_public_stream_msg_id = self.send_stream_message(
            self.example_user("cordelia"), web_public_stream.name, content="web-public message"
        )
        result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
        response_dict = self.assert_json_success(result)
        self.assertEqual(response_dict["raw_content"], "web-public message")
        self.assertEqual(response_dict["message"]["id"], web_public_stream_msg_id)
        # "historical": no UserMessage row existed for this user.
        self.assertEqual(response_dict["message"]["flags"], ["read", "historical"])

        # Spectator should be able to fetch message in web-public stream.
        self.logout()
        result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
        response_dict = self.assert_json_success(result)
        self.assertEqual(response_dict["raw_content"], "web-public message")
        self.assertEqual(response_dict["message"]["id"], web_public_stream_msg_id)

        # Verify default is apply_markdown=True
        self.assertEqual(response_dict["message"]["content"], "<p>web-public message</p>")

        # Verify apply_markdown=False works correctly.
        result = self.client_get(
            "/json/messages/" + str(web_public_stream_msg_id), {"apply_markdown": "false"}
        )
        response_dict = self.assert_json_success(result)
        self.assertEqual(response_dict["raw_content"], "web-public message")
        self.assertEqual(response_dict["message"]["content"], "web-public message")

        # With the server-level web-public toggle off, spectators get a 401.
        with self.settings(WEB_PUBLIC_STREAMS_ENABLED=False):
            result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
            self.assert_json_error(
                result, "Not logged in: API authentication or user session required", status_code=401
            )

        # Test error cases
        self.login("hamlet")
        result = self.client_get("/json/messages/999999")
        self.assert_json_error(result, "Invalid message(s)")

        # Cordelia was the direct-message recipient, so she can fetch it.
        self.login("cordelia")
        result = self.client_get(f"/json/messages/{msg_id}")
        self.assert_json_success(result)

        # Othello was not a participant, so the message is invisible to him.
        self.login("othello")
        result = self.client_get(f"/json/messages/{msg_id}")
        self.assert_json_error(result, "Invalid message(s)")
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-09-20 10:34:10 +02:00
|
|
|
    def test_fetch_raw_message_spectator(self) -> None:
        """Verify which messages a logged-out spectator can fetch by id:
        only web-public stream messages, and only when the server setting,
        the realm's enable_spectator_access property, and the realm's plan
        type all permit it.
        """
        user_profile = self.example_user("iago")
        self.login("iago")
        web_public_stream = self.make_stream("web-public-stream", is_web_public=True)
        self.subscribe(user_profile, web_public_stream.name)

        web_public_stream_msg_id = self.send_stream_message(
            user_profile, web_public_stream.name, content="web-public message"
        )

        non_web_public_stream = self.make_stream("non-web-public-stream")
        self.subscribe(user_profile, non_web_public_stream.name)
        non_web_public_stream_msg_id = self.send_stream_message(
            user_profile, non_web_public_stream.name, content="non-web-public message"
        )

        # Generate a direct message to use in verification.
        private_message_id = self.send_personal_message(user_profile, user_profile)

        # An id guaranteed not to exist (well past the latest message).
        invalid_message_id = private_message_id + 1000

        self.logout()

        # Confirm WEB_PUBLIC_STREAMS_ENABLED is enforced.
        with self.settings(WEB_PUBLIC_STREAMS_ENABLED=False):
            result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
            self.assert_json_error(
                result, "Not logged in: API authentication or user session required", 401
            )

        # The realm-level spectator toggle is enforced too.
        do_set_realm_property(
            user_profile.realm, "enable_spectator_access", False, acting_user=None
        )
        result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
        self.assert_json_error(
            result, "Not logged in: API authentication or user session required", 401
        )
        do_set_realm_property(user_profile.realm, "enable_spectator_access", True, acting_user=None)

        # Verify success with web-public stream and default SELF_HOSTED plan type.
        result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
        response_dict = self.assert_json_success(result)
        self.assertEqual(response_dict["raw_content"], "web-public message")
        self.assertEqual(response_dict["message"]["flags"], ["read"])

        # Verify LIMITED plan type does not allow web-public access.
        do_change_realm_plan_type(user_profile.realm, Realm.PLAN_TYPE_LIMITED, acting_user=None)
        result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
        self.assert_json_error(
            result, "Not logged in: API authentication or user session required", 401
        )

        # Even with spectator access explicitly re-enabled, the LIMITED
        # plan still blocks spectators.
        do_set_realm_property(user_profile.realm, "enable_spectator_access", True, acting_user=None)
        result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
        self.assert_json_error(
            result, "Not logged in: API authentication or user session required", 401
        )

        # Verify works with STANDARD_FREE plan type too.
        do_change_realm_plan_type(
            user_profile.realm, Realm.PLAN_TYPE_STANDARD_FREE, acting_user=None
        )
        result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
        response_dict = self.assert_json_success(result)
        self.assertEqual(response_dict["raw_content"], "web-public message")

        # Verify direct messages are rejected.
        result = self.client_get("/json/messages/" + str(private_message_id))
        self.assert_json_error(
            result, "Not logged in: API authentication or user session required", 401
        )

        # Verify an actual public stream is required.
        result = self.client_get("/json/messages/" + str(non_web_public_stream_msg_id))
        self.assert_json_error(
            result, "Not logged in: API authentication or user session required", 401
        )

        # Verify invalid message IDs are rejected with the same error message.
        result = self.client_get("/json/messages/" + str(invalid_message_id))
        self.assert_json_error(
            result, "Not logged in: API authentication or user session required", 401
        )

        # Verify deactivated streams are rejected. This may change in the future.
        do_deactivate_stream(web_public_stream, acting_user=None)
        result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
        self.assert_json_error(
            result, "Not logged in: API authentication or user session required", 401
        )
|
|
|
|
|
2020-07-03 11:09:37 +02:00
|
|
|
def test_fetch_raw_message_stream_wrong_realm(self) -> None:
|
|
|
|
user_profile = self.example_user("hamlet")
|
|
|
|
self.login_user(user_profile)
|
2021-02-12 08:20:45 +01:00
|
|
|
stream = self.make_stream("public_stream")
|
2020-07-03 11:09:37 +02:00
|
|
|
self.subscribe(user_profile, stream.name)
|
2021-02-12 08:19:30 +01:00
|
|
|
msg_id = self.send_stream_message(
|
|
|
|
user_profile, stream.name, topic_name="test", content="test"
|
|
|
|
)
|
2021-02-03 14:07:36 +01:00
|
|
|
result = self.client_get(f"/json/messages/{msg_id}")
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_success(result)
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
mit_user = self.mit_user("sipbtest")
|
2020-07-03 11:09:37 +02:00
|
|
|
self.login_user(mit_user)
|
2021-02-03 14:07:36 +01:00
|
|
|
result = self.client_get(f"/json/messages/{msg_id}", subdomain="zephyr")
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assert_json_error(result, "Invalid message(s)")
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
def test_fetch_raw_message_private_stream(self) -> None:
|
|
|
|
user_profile = self.example_user("hamlet")
|
|
|
|
self.login_user(user_profile)
|
2021-02-12 08:20:45 +01:00
|
|
|
stream = self.make_stream("private_stream", invite_only=True)
|
2020-07-03 11:09:37 +02:00
|
|
|
self.subscribe(user_profile, stream.name)
|
2021-02-12 08:19:30 +01:00
|
|
|
msg_id = self.send_stream_message(
|
|
|
|
user_profile, stream.name, topic_name="test", content="test"
|
|
|
|
)
|
2021-02-03 14:07:36 +01:00
|
|
|
result = self.client_get(f"/json/messages/{msg_id}")
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_success(result)
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("othello")
|
2021-02-03 14:07:36 +01:00
|
|
|
result = self.client_get(f"/json/messages/{msg_id}")
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assert_json_error(result, "Invalid message(s)")
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
def test_edit_message_no_permission(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2021-02-12 08:19:30 +01:00
|
|
|
msg_id = self.send_stream_message(
|
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
|
|
|
self.example_user("iago"), "Denmark", topic_name="editing", content="before edit"
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
result = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{msg_id}",
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"content": "content after edit",
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_error(result, "You don't have permission to edit this message")
|
|
|
|
|
2023-05-09 20:22:54 +02:00
|
|
|
self.login("iago")
|
|
|
|
realm = get_realm("zulip")
|
|
|
|
do_set_realm_property(realm, "allow_message_editing", False, acting_user=None)
|
|
|
|
result = self.client_patch(
|
|
|
|
f"/json/messages/{msg_id}",
|
|
|
|
{
|
|
|
|
"content": "content after edit",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
self.assert_json_error(result, "Your organization has turned off message editing")
|
|
|
|
|
2020-07-03 11:09:37 +02:00
|
|
|
def test_edit_message_no_content(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2021-02-12 08:19:30 +01:00
|
|
|
msg_id = self.send_stream_message(
|
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
|
|
|
self.example_user("hamlet"), "Denmark", topic_name="editing", content="before edit"
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
result = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{msg_id}",
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"content": " ",
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_success(result)
|
2021-02-12 08:20:45 +01:00
|
|
|
content = Message.objects.filter(id=msg_id).values_list("content", flat=True)[0]
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assertEqual(content, "(deleted)")
|
|
|
|
|
2023-06-15 00:39:53 +02:00
|
|
|
def test_edit_message_in_unsubscribed_private_stream(self) -> None:
|
|
|
|
hamlet = self.example_user("hamlet")
|
|
|
|
self.login("hamlet")
|
|
|
|
|
|
|
|
self.make_stream("privatestream", invite_only=True, history_public_to_subscribers=False)
|
|
|
|
self.subscribe(hamlet, "privatestream")
|
|
|
|
msg_id = self.send_stream_message(
|
|
|
|
hamlet, "privatestream", topic_name="editing", content="before edit"
|
|
|
|
)
|
|
|
|
|
|
|
|
# Ensure the user originally could edit the message. This ensures the
|
|
|
|
# loss of the ability is caused by unsubscribing, rather than something
|
|
|
|
# else wrong with the test's setup/assumptions.
|
|
|
|
result = self.client_patch(
|
|
|
|
f"/json/messages/{msg_id}",
|
|
|
|
{
|
|
|
|
"content": "test can edit before unsubscribing",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
self.unsubscribe(hamlet, "privatestream")
|
|
|
|
result = self.client_patch(
|
|
|
|
f"/json/messages/{msg_id}",
|
|
|
|
{
|
|
|
|
"content": "after unsubscribing",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
self.assert_json_error(result, "Invalid message(s)")
|
|
|
|
content = Message.objects.get(id=msg_id).content
|
|
|
|
self.assertEqual(content, "test can edit before unsubscribing")
|
|
|
|
|
|
|
|
def test_edit_message_guest_in_unsubscribed_public_stream(self) -> None:
|
|
|
|
guest_user = self.example_user("polonius")
|
|
|
|
self.login("polonius")
|
|
|
|
self.assertEqual(guest_user.role, UserProfile.ROLE_GUEST)
|
|
|
|
|
|
|
|
self.make_stream("publicstream", invite_only=False)
|
|
|
|
self.subscribe(guest_user, "publicstream")
|
|
|
|
msg_id = self.send_stream_message(
|
|
|
|
guest_user, "publicstream", topic_name="editing", content="before edit"
|
|
|
|
)
|
|
|
|
|
|
|
|
# Ensure the user originally could edit the message.
|
|
|
|
result = self.client_patch(
|
|
|
|
f"/json/messages/{msg_id}",
|
|
|
|
{
|
|
|
|
"content": "test can edit before unsubscribing",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
self.unsubscribe(guest_user, "publicstream")
|
|
|
|
result = self.client_patch(
|
|
|
|
f"/json/messages/{msg_id}",
|
|
|
|
{
|
|
|
|
"content": "after unsubscribing",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
self.assert_json_error(result, "Invalid message(s)")
|
|
|
|
content = Message.objects.get(id=msg_id).content
|
|
|
|
self.assertEqual(content, "test can edit before unsubscribing")
|
|
|
|
|
2020-07-03 11:09:37 +02:00
|
|
|
def test_edit_message_history_disabled(self) -> None:
|
|
|
|
user_profile = self.example_user("hamlet")
|
2021-03-01 11:33:24 +01:00
|
|
|
do_set_realm_property(user_profile.realm, "allow_edit_history", False, acting_user=None)
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
# Single-line edit
|
2021-02-12 08:19:30 +01:00
|
|
|
msg_id_1 = self.send_stream_message(
|
|
|
|
self.example_user("hamlet"),
|
|
|
|
"Denmark",
|
|
|
|
topic_name="editing",
|
|
|
|
content="content before edit",
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
new_content_1 = "content after edit"
|
2021-02-12 08:19:30 +01:00
|
|
|
result_1 = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{msg_id_1}",
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"content": new_content_1,
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_success(result_1)
|
|
|
|
|
2021-02-03 14:07:36 +01:00
|
|
|
result = self.client_get(f"/json/messages/{msg_id_1}/history")
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_error(result, "Message edit history is disabled in this organization")
|
|
|
|
|
|
|
|
# Now verify that if we fetch the message directly, there's no
|
|
|
|
# edit history data attached.
|
2021-02-12 08:19:30 +01:00
|
|
|
messages_result = self.client_get(
|
|
|
|
"/json/messages", {"anchor": msg_id_1, "num_before": 0, "num_after": 10}
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_success(messages_result)
|
2020-10-30 01:18:43 +01:00
|
|
|
json_messages = orjson.loads(messages_result.content)
|
2021-02-12 08:20:45 +01:00
|
|
|
for msg in json_messages["messages"]:
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assertNotIn("edit_history", msg)
|
|
|
|
|
|
|
|
def test_edit_message_history(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
# Single-line edit
|
|
|
|
msg_id_1 = self.send_stream_message(
|
|
|
|
self.example_user("hamlet"),
|
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
|
|
|
"Denmark",
|
2020-07-03 11:09:37 +02:00
|
|
|
topic_name="editing",
|
2021-02-12 08:19:30 +01:00
|
|
|
content="content before edit",
|
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
new_content_1 = "content after edit"
|
2021-02-12 08:19:30 +01:00
|
|
|
result_1 = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{msg_id_1}",
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"content": new_content_1,
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_success(result_1)
|
|
|
|
|
2021-02-03 14:07:36 +01:00
|
|
|
message_edit_history_1 = self.client_get(f"/json/messages/{msg_id_1}/history")
|
2020-10-30 01:18:43 +01:00
|
|
|
json_response_1 = orjson.loads(message_edit_history_1.content)
|
2021-02-12 08:20:45 +01:00
|
|
|
message_history_1 = json_response_1["message_history"]
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
# Check content of message after edit.
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(message_history_1[0]["rendered_content"], "<p>content before edit</p>")
|
|
|
|
self.assertEqual(message_history_1[1]["rendered_content"], "<p>content after edit</p>")
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
2021-02-12 08:20:45 +01:00
|
|
|
message_history_1[1]["content_html_diff"],
|
2021-02-12 08:19:30 +01:00
|
|
|
(
|
2021-10-01 00:14:28 +02:00
|
|
|
"<div><p>content "
|
2021-02-12 08:19:30 +01:00
|
|
|
'<span class="highlight_text_inserted">after</span> '
|
|
|
|
'<span class="highlight_text_deleted">before</span>'
|
2021-10-01 00:14:28 +02:00
|
|
|
" edit</p></div>"
|
2021-02-12 08:19:30 +01:00
|
|
|
),
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
# Check content of message before edit.
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
2021-02-12 08:20:45 +01:00
|
|
|
message_history_1[1]["prev_rendered_content"], "<p>content before edit</p>"
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
# Edits on new lines
|
|
|
|
msg_id_2 = self.send_stream_message(
|
|
|
|
self.example_user("hamlet"),
|
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
|
|
|
"Denmark",
|
2020-07-03 11:09:37 +02:00
|
|
|
topic_name="editing",
|
2021-02-12 08:20:45 +01:00
|
|
|
content="content before edit, line 1\n\ncontent before edit, line 3",
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
new_content_2 = (
|
2021-02-12 08:20:45 +01:00
|
|
|
"content before edit, line 1\n"
|
|
|
|
"content after edit, line 2\n"
|
|
|
|
"content before edit, line 3"
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
result_2 = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{msg_id_2}",
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"content": new_content_2,
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_success(result_2)
|
|
|
|
|
2021-02-03 14:07:36 +01:00
|
|
|
message_edit_history_2 = self.client_get(f"/json/messages/{msg_id_2}/history")
|
2020-10-30 01:18:43 +01:00
|
|
|
json_response_2 = orjson.loads(message_edit_history_2.content)
|
2021-02-12 08:20:45 +01:00
|
|
|
message_history_2 = json_response_2["message_history"]
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
2021-02-12 08:20:45 +01:00
|
|
|
message_history_2[0]["rendered_content"],
|
|
|
|
"<p>content before edit, line 1</p>\n<p>content before edit, line 3</p>",
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
self.assertEqual(
|
2021-02-12 08:20:45 +01:00
|
|
|
message_history_2[1]["rendered_content"],
|
2021-02-12 08:19:30 +01:00
|
|
|
(
|
2021-02-12 08:20:45 +01:00
|
|
|
"<p>content before edit, line 1<br>\n"
|
|
|
|
"content after edit, line 2<br>\n"
|
|
|
|
"content before edit, line 3</p>"
|
2021-02-12 08:19:30 +01:00
|
|
|
),
|
|
|
|
)
|
|
|
|
self.assertEqual(
|
2021-02-12 08:20:45 +01:00
|
|
|
message_history_2[1]["content_html_diff"],
|
2021-02-12 08:19:30 +01:00
|
|
|
(
|
2021-10-01 00:14:28 +02:00
|
|
|
"<div><p>content before edit, line 1<br> "
|
2021-02-12 08:19:30 +01:00
|
|
|
'content <span class="highlight_text_inserted">after edit, line 2<br> '
|
2021-10-01 00:14:28 +02:00
|
|
|
"content</span> before edit, line 3</p></div>"
|
2021-02-12 08:19:30 +01:00
|
|
|
),
|
|
|
|
)
|
|
|
|
self.assertEqual(
|
2021-02-12 08:20:45 +01:00
|
|
|
message_history_2[1]["prev_rendered_content"],
|
|
|
|
"<p>content before edit, line 1</p>\n<p>content before edit, line 3</p>",
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-10-01 00:14:28 +02:00
|
|
|
def test_empty_message_edit(self) -> None:
|
|
|
|
self.login("hamlet")
|
|
|
|
msg_id = self.send_stream_message(
|
|
|
|
self.example_user("hamlet"),
|
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
|
|
|
"Denmark",
|
2021-10-01 00:14:28 +02:00
|
|
|
topic_name="editing",
|
|
|
|
content="We will edit this to render as empty.",
|
|
|
|
)
|
|
|
|
# Edit that manually to simulate a rendering bug
|
|
|
|
message = Message.objects.get(id=msg_id)
|
|
|
|
message.rendered_content = ""
|
|
|
|
message.save(update_fields=["rendered_content"])
|
|
|
|
|
|
|
|
self.assert_json_success(
|
|
|
|
self.client_patch(
|
|
|
|
"/json/messages/" + str(msg_id),
|
|
|
|
{
|
|
|
|
"content": "We will edit this to also render as empty.",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
)
|
|
|
|
# And again tweak to simulate a rendering bug
|
|
|
|
message = Message.objects.get(id=msg_id)
|
|
|
|
message.rendered_content = ""
|
|
|
|
message.save(update_fields=["rendered_content"])
|
|
|
|
|
|
|
|
history = self.client_get("/json/messages/" + str(msg_id) + "/history")
|
|
|
|
message_history = orjson.loads(history.content)["message_history"]
|
|
|
|
self.assertEqual(message_history[0]["rendered_content"], "")
|
|
|
|
self.assertEqual(message_history[1]["rendered_content"], "")
|
|
|
|
self.assertEqual(message_history[1]["content_html_diff"], "<div></div>")
|
|
|
|
|
2020-07-03 11:09:37 +02:00
|
|
|
def test_edit_link(self) -> None:
|
|
|
|
# Link editing
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2020-07-03 11:09:37 +02:00
|
|
|
msg_id_1 = self.send_stream_message(
|
|
|
|
self.example_user("hamlet"),
|
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
|
|
|
"Denmark",
|
2020-07-03 11:09:37 +02:00
|
|
|
topic_name="editing",
|
2021-02-12 08:19:30 +01:00
|
|
|
content="Here is a link to [zulip](www.zulip.org).",
|
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
new_content_1 = "Here is a link to [zulip](www.zulipchat.com)."
|
2021-02-12 08:19:30 +01:00
|
|
|
result_1 = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{msg_id_1}",
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"content": new_content_1,
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_success(result_1)
|
|
|
|
|
2021-02-03 14:07:36 +01:00
|
|
|
message_edit_history_1 = self.client_get(f"/json/messages/{msg_id_1}/history")
|
2020-10-30 01:18:43 +01:00
|
|
|
json_response_1 = orjson.loads(message_edit_history_1.content)
|
2021-02-12 08:20:45 +01:00
|
|
|
message_history_1 = json_response_1["message_history"]
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
# Check content of message after edit.
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
2021-02-12 08:20:45 +01:00
|
|
|
message_history_1[0]["rendered_content"],
|
2023-01-03 01:51:16 +01:00
|
|
|
'<p>Here is a link to <a href="http://www.zulip.org">zulip</a>.</p>',
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
self.assertEqual(
|
2021-02-12 08:20:45 +01:00
|
|
|
message_history_1[1]["rendered_content"],
|
2023-01-03 01:51:16 +01:00
|
|
|
'<p>Here is a link to <a href="http://www.zulipchat.com">zulip</a>.</p>',
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
self.assertEqual(
|
2021-02-12 08:20:45 +01:00
|
|
|
message_history_1[1]["content_html_diff"],
|
2021-02-12 08:19:30 +01:00
|
|
|
(
|
2021-10-01 00:14:28 +02:00
|
|
|
'<div><p>Here is a link to <a href="http://www.zulipchat.com"'
|
2021-02-12 08:20:45 +01:00
|
|
|
">zulip "
|
2021-02-12 08:19:30 +01:00
|
|
|
'<span class="highlight_text_inserted"> Link: http://www.zulipchat.com .'
|
|
|
|
'</span> <span class="highlight_text_deleted"> Link: http://www.zulip.org .'
|
2021-10-01 00:14:28 +02:00
|
|
|
"</span> </a></p></div>"
|
2021-02-12 08:19:30 +01:00
|
|
|
),
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
def test_edit_history_unedited(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
msg_id = self.send_stream_message(
|
2021-02-12 08:20:45 +01:00
|
|
|
self.example_user("hamlet"),
|
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
|
|
|
"Denmark",
|
2021-02-12 08:20:45 +01:00
|
|
|
topic_name="editing",
|
|
|
|
content="This message has not been edited.",
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_get(f"/json/messages/{msg_id}/history")
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2022-06-07 01:37:01 +02:00
|
|
|
message_history = self.assert_json_success(result)["message_history"]
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_length(message_history, 1)
|
|
|
|
|
2022-04-19 01:12:26 +02:00
|
|
|
def test_mentions_for_message_updates(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
hamlet = self.example_user("hamlet")
|
|
|
|
cordelia = self.example_user("cordelia")
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
self.login_user(hamlet)
|
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
|
|
|
self.subscribe(hamlet, "Denmark")
|
|
|
|
self.subscribe(cordelia, "Denmark")
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-04-11 16:26:54 +02:00
|
|
|
msg_id = self.send_stream_message(
|
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
|
|
|
hamlet, "Denmark", content="@**Cordelia, Lear's daughter**"
|
2021-04-11 16:26:54 +02:00
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2022-04-19 01:12:26 +02:00
|
|
|
mention_user_ids = get_mentions_for_message_updates(msg_id)
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assertEqual(mention_user_ids, {cordelia.id})
|
|
|
|
|
|
|
|
    def test_edit_cases(self) -> None:
        """This test verifies the accuracy of construction of Zulip's edit
        history data structures.

        It performs a fixed sequence of edits (content-only, topic-only,
        stream-only, content+topic, content-only, topic+stream) and, after
        each one, checks that the newest ``edit_history`` entry contains
        exactly the expected keys and values.  Finally it checks the
        aggregate edit history both as stored on the Message row and as
        returned by the ``GET /json/messages/{id}/history`` endpoint.
        """
        self.login("hamlet")
        hamlet = self.example_user("hamlet")
        # Three streams so we can exercise two separate stream moves.
        stream_1 = self.make_stream("stream 1")
        stream_2 = self.make_stream("stream 2")
        stream_3 = self.make_stream("stream 3")
        self.subscribe(hamlet, stream_1.name)
        self.subscribe(hamlet, stream_2.name)
        self.subscribe(hamlet, stream_3.name)
        msg_id = self.send_stream_message(
            self.example_user("hamlet"), "stream 1", topic_name="topic 1", content="content 1"
        )

        # Edit 1: content-only edit by the sender.
        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "content": "content 2",
            },
        )
        self.assert_json_success(result)
        history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))
        self.assertEqual(history[0]["prev_content"], "content 1")
        self.assertEqual(history[0]["user_id"], hamlet.id)
        # A content edit records prev_content plus its rendered form/version,
        # but no topic or stream keys.
        self.assertEqual(
            set(history[0].keys()),
            {
                "timestamp",
                "prev_content",
                "user_id",
                "prev_rendered_content",
                "prev_rendered_content_version",
            },
        )

        # Edit 2: topic-only edit by the sender.
        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "topic": "topic 2",
            },
        )
        self.assert_json_success(result)
        history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))
        self.assertEqual(history[0]["prev_topic"], "topic 1")
        self.assertEqual(history[0]["topic"], "topic 2")
        self.assertEqual(history[0]["user_id"], hamlet.id)
        self.assertEqual(
            set(history[0].keys()),
            {"timestamp", "prev_topic", "topic", "user_id"},
        )

        # Edit 3: stream move, done by an administrator (iago), since
        # moving between streams requires elevated permissions.
        self.login("iago")
        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "stream_id": stream_2.id,
            },
        )
        self.assert_json_success(result)
        history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))
        self.assertEqual(history[0]["prev_stream"], stream_1.id)
        self.assertEqual(history[0]["stream"], stream_2.id)
        self.assertEqual(history[0]["user_id"], self.example_user("iago").id)
        self.assertEqual(set(history[0].keys()), {"timestamp", "prev_stream", "stream", "user_id"})

        # Edit 4: combined content + topic edit by the sender.
        self.login("hamlet")
        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "content": "content 3",
                "topic": "topic 3",
            },
        )
        self.assert_json_success(result)
        history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))
        self.assertEqual(history[0]["prev_content"], "content 2")
        self.assertEqual(history[0]["prev_topic"], "topic 2")
        self.assertEqual(history[0]["topic"], "topic 3")
        self.assertEqual(history[0]["user_id"], hamlet.id)
        # A combined edit records both the topic keys and the content keys
        # in a single history entry.
        self.assertEqual(
            set(history[0].keys()),
            {
                "timestamp",
                "prev_topic",
                "topic",
                "prev_content",
                "user_id",
                "prev_rendered_content",
                "prev_rendered_content_version",
            },
        )

        # Edit 5: another content-only edit.
        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "content": "content 4",
            },
        )
        self.assert_json_success(result)
        history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))
        self.assertEqual(history[0]["prev_content"], "content 3")
        self.assertEqual(history[0]["user_id"], hamlet.id)

        # Edit 6: combined topic + stream move by an administrator.
        self.login("iago")
        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "topic": "topic 4",
                "stream_id": stream_3.id,
            },
        )
        self.assert_json_success(result)
        history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))
        self.assertEqual(history[0]["prev_topic"], "topic 3")
        self.assertEqual(history[0]["topic"], "topic 4")
        self.assertEqual(history[0]["prev_stream"], stream_2.id)
        self.assertEqual(history[0]["stream"], stream_3.id)
        self.assertEqual(history[0]["user_id"], self.example_user("iago").id)
        self.assertEqual(
            set(history[0].keys()),
            {
                "timestamp",
                "prev_topic",
                "topic",
                "prev_stream",
                "stream",
                "user_id",
            },
        )

        # Now, we verify that all of the edits stored in the message.edit_history
        # have the correct data structure.  edit_history is ordered
        # newest-first: history[0] is edit 6 above, history[5] is edit 1.
        history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))

        self.assertEqual(history[0]["prev_topic"], "topic 3")
        self.assertEqual(history[0]["topic"], "topic 4")
        self.assertEqual(history[0]["stream"], stream_3.id)
        self.assertEqual(history[0]["prev_stream"], stream_2.id)

        self.assertEqual(history[1]["prev_content"], "content 3")

        self.assertEqual(history[2]["prev_topic"], "topic 2")
        self.assertEqual(history[2]["topic"], "topic 3")
        self.assertEqual(history[2]["prev_content"], "content 2")

        self.assertEqual(history[3]["stream"], stream_2.id)
        self.assertEqual(history[3]["prev_stream"], stream_1.id)

        self.assertEqual(history[4]["prev_topic"], "topic 1")
        self.assertEqual(history[4]["topic"], "topic 2")

        self.assertEqual(history[5]["prev_content"], "content 1")

        # Now, we verify that the edit history data sent back has the
        # correct filled-out fields
        message_edit_history = self.client_get(f"/json/messages/{msg_id}/history")

        json_response = orjson.loads(message_edit_history.content)

        # We reverse the message history view output so that the IDs line up with the above.
        message_history = list(reversed(json_response["message_history"]))
        for i, entry in enumerate(message_history):
            # Every entry carries the post-edit snapshot fields; the
            # prev_* fields depend on which kind of edit each entry was
            # (indices: 0/2/4 touched the topic, 1/2/5 touched the
            # content, 0/3 moved streams; 6 is the original message).
            expected_entries = {"content", "rendered_content", "topic", "timestamp", "user_id"}
            if i in {0, 2, 4}:
                expected_entries.add("prev_topic")
                expected_entries.add("topic")
            if i in {1, 2, 5}:
                expected_entries.add("prev_content")
                expected_entries.add("prev_rendered_content")
                expected_entries.add("content_html_diff")
            if i in {0, 3}:
                expected_entries.add("prev_stream")
                expected_entries.add("stream")
            self.assertEqual(expected_entries, set(entry.keys()))
        # 6 edits + the original message = 7 history entries.
        self.assert_length(message_history, 7)
        self.assertEqual(message_history[0]["topic"], "topic 4")
        self.assertEqual(message_history[0]["prev_topic"], "topic 3")
        self.assertEqual(message_history[0]["stream"], stream_3.id)
        self.assertEqual(message_history[0]["prev_stream"], stream_2.id)
        self.assertEqual(message_history[0]["content"], "content 4")

        self.assertEqual(message_history[1]["topic"], "topic 3")
        self.assertEqual(message_history[1]["content"], "content 4")
        self.assertEqual(message_history[1]["prev_content"], "content 3")

        self.assertEqual(message_history[2]["topic"], "topic 3")
        self.assertEqual(message_history[2]["prev_topic"], "topic 2")
        self.assertEqual(message_history[2]["content"], "content 3")
        self.assertEqual(message_history[2]["prev_content"], "content 2")

        self.assertEqual(message_history[3]["topic"], "topic 2")
        self.assertEqual(message_history[3]["stream"], stream_2.id)
        self.assertEqual(message_history[3]["prev_stream"], stream_1.id)
        self.assertEqual(message_history[3]["content"], "content 2")

        self.assertEqual(message_history[4]["topic"], "topic 2")
        self.assertEqual(message_history[4]["prev_topic"], "topic 1")
        self.assertEqual(message_history[4]["content"], "content 2")

        self.assertEqual(message_history[5]["topic"], "topic 1")
        self.assertEqual(message_history[5]["content"], "content 2")
        self.assertEqual(message_history[5]["prev_content"], "content 1")

        self.assertEqual(message_history[6]["content"], "content 1")
        self.assertEqual(message_history[6]["topic"], "topic 1")
    def test_edit_message_content_limit(self) -> None:
        """Verify enforcement of the realm's ``message_content_edit_limit_seconds``
        and ``allow_message_editing`` settings.

        The test sends a message backdated by 180 seconds, then varies the
        realm settings around that age to check which content/topic edits
        succeed or fail.
        """

        def set_message_editing_params(
            allow_message_editing: bool,
            message_content_edit_limit_seconds: int | str,
            edit_topic_policy: int,
        ) -> None:
            # Update the realm-level message-editing settings via the API;
            # values are JSON-encoded as the endpoint expects.
            result = self.client_patch(
                "/json/realm",
                {
                    "allow_message_editing": orjson.dumps(allow_message_editing).decode(),
                    "message_content_edit_limit_seconds": orjson.dumps(
                        message_content_edit_limit_seconds
                    ).decode(),
                    "edit_topic_policy": orjson.dumps(edit_topic_policy).decode(),
                },
            )
            self.assert_json_success(result)

        def do_edit_message_assert_success(
            id_: int, unique_str: str, topic_only: bool = False
        ) -> None:
            # Attempt an edit (topic always; content too unless topic_only)
            # and assert both the API success and the resulting DB state.
            new_topic_name = "topic" + unique_str
            new_content = "content" + unique_str
            params_dict = {"topic": new_topic_name}
            if not topic_only:
                params_dict["content"] = new_content
            result = self.client_patch(f"/json/messages/{id_}", params_dict)
            self.assert_json_success(result)
            if topic_only:
                self.assertEqual(Message.objects.get(id=id_).topic_name(), new_topic_name)
            else:
                self.check_message(id_, topic_name=new_topic_name, content=new_content)

        def do_edit_message_assert_error(
            id_: int, unique_str: str, error: str, topic_only: bool = False
        ) -> None:
            # Attempt an edit, assert the expected API error, and verify the
            # message's topic and content are unchanged in the database.
            message = Message.objects.get(id=id_)
            old_topic_name = message.topic_name()
            old_content = message.content
            new_topic_name = "topic" + unique_str
            new_content = "content" + unique_str
            params_dict = {"topic": new_topic_name}
            if not topic_only:
                params_dict["content"] = new_content
            result = self.client_patch(f"/json/messages/{id_}", params_dict)
            message = Message.objects.get(id=id_)
            self.assert_json_error(result, error)

            msg = Message.objects.get(id=id_)
            self.assertEqual(msg.topic_name(), old_topic_name)
            self.assertEqual(msg.content, old_content)

        self.login("iago")
        # send a message in the past
        id_ = self.send_stream_message(
            self.example_user("iago"), "Denmark", content="content", topic_name="topic"
        )
        message = Message.objects.get(id=id_)
        # Backdate the message by 3 minutes so the edit limits below
        # (240s vs 120s) straddle its age.
        message.date_sent -= timedelta(seconds=180)
        message.save()

        # test the various possible message editing settings
        # high enough time limit, all edits allowed
        set_message_editing_params(True, 240, EditTopicPolicyEnum.ADMINS_ONLY)
        do_edit_message_assert_success(id_, "A")

        # out of time, only topic editing allowed
        set_message_editing_params(True, 120, EditTopicPolicyEnum.ADMINS_ONLY)
        do_edit_message_assert_success(id_, "B", True)
        do_edit_message_assert_error(id_, "C", "The time limit for editing this message has passed")

        # infinite time, all edits allowed
        set_message_editing_params(True, "unlimited", EditTopicPolicyEnum.ADMINS_ONLY)
        do_edit_message_assert_success(id_, "D")

        # without allow_message_editing, editing content is not allowed but
        # editing topic is allowed if topic-edit time limit has not passed
        # irrespective of content-edit time limit.
        set_message_editing_params(False, 240, EditTopicPolicyEnum.ADMINS_ONLY)
        do_edit_message_assert_success(id_, "B", True)

        set_message_editing_params(False, 240, EditTopicPolicyEnum.ADMINS_ONLY)
        do_edit_message_assert_success(id_, "E", True)
        set_message_editing_params(False, 120, EditTopicPolicyEnum.ADMINS_ONLY)
        do_edit_message_assert_success(id_, "F", True)
        set_message_editing_params(False, "unlimited", EditTopicPolicyEnum.ADMINS_ONLY)
        do_edit_message_assert_success(id_, "G", True)
    def test_edit_topic_policy(self) -> None:
        """Verify enforcement of the realm's ``edit_topic_policy`` setting.

        Walks through each policy value (EVERYONE, MEMBERS_ONLY,
        FULL_MEMBERS_ONLY, MODERATORS_ONLY, ADMINS_ONLY, NOBODY), checking
        which roles may edit a message's topic, plus the interaction with
        ``allow_message_editing`` and with the topic-edit time limit
        (``move_messages_within_stream_limit_seconds``).
        """

        def set_message_editing_params(
            allow_message_editing: bool,
            message_content_edit_limit_seconds: int | str,
            edit_topic_policy: int,
        ) -> None:
            # Realm settings can only be changed by an administrator, so we
            # log in as iago before each settings update.
            self.login("iago")
            result = self.client_patch(
                "/json/realm",
                {
                    "allow_message_editing": orjson.dumps(allow_message_editing).decode(),
                    "message_content_edit_limit_seconds": orjson.dumps(
                        message_content_edit_limit_seconds
                    ).decode(),
                    "edit_topic_policy": orjson.dumps(edit_topic_policy).decode(),
                },
            )
            self.assert_json_success(result)

        def do_edit_message_assert_success(id_: int, unique_str: str, acting_user: str) -> None:
            # Edit the topic as acting_user and assert both the API success
            # and that the new topic landed in the database.
            self.login(acting_user)
            new_topic_name = "topic" + unique_str
            params_dict = {"topic": new_topic_name}
            result = self.client_patch(f"/json/messages/{id_}", params_dict)
            self.assert_json_success(result)
            self.assertEqual(Message.objects.get(id=id_).topic_name(), new_topic_name)

        def do_edit_message_assert_error(
            id_: int, unique_str: str, error: str, acting_user: str
        ) -> None:
            # Attempt a topic edit as acting_user, assert the expected API
            # error, and verify the message is unchanged in the database.
            self.login(acting_user)
            message = Message.objects.get(id=id_)
            old_topic_name = message.topic_name()
            old_content = message.content
            new_topic_name = "topic" + unique_str
            params_dict = {"topic": new_topic_name}
            result = self.client_patch(f"/json/messages/{id_}", params_dict)
            message = Message.objects.get(id=id_)
            self.assert_json_error(result, error)
            msg = Message.objects.get(id=id_)
            self.assertEqual(msg.topic_name(), old_topic_name)
            self.assertEqual(msg.content, old_content)

        # send a message in the past
        id_ = self.send_stream_message(
            self.example_user("hamlet"), "Denmark", content="content", topic_name="topic"
        )
        message = Message.objects.get(id=id_)
        message.date_sent -= timedelta(seconds=180)
        message.save()

        # Guest user must be subscribed to the stream to access the message.
        polonius = self.example_user("polonius")
        self.subscribe(polonius, "Denmark")

        # any user can edit the topic of a message
        set_message_editing_params(True, "unlimited", EditTopicPolicyEnum.EVERYONE)
        do_edit_message_assert_success(id_, "A", "polonius")

        # only members can edit topic of a message
        set_message_editing_params(True, "unlimited", EditTopicPolicyEnum.MEMBERS_ONLY)
        do_edit_message_assert_error(
            id_, "B", "You don't have permission to edit this message", "polonius"
        )
        do_edit_message_assert_success(id_, "B", "cordelia")

        # only full members can edit topic of a message
        set_message_editing_params(True, "unlimited", EditTopicPolicyEnum.FULL_MEMBERS_ONLY)

        cordelia = self.example_user("cordelia")
        hamlet = self.example_user("hamlet")
        # With a 10-day waiting period, users joined 9 days ago are not yet
        # "full members"; 11 days ago they are.
        do_set_realm_property(cordelia.realm, "waiting_period_threshold", 10, acting_user=None)

        cordelia.date_joined = timezone_now() - timedelta(days=9)
        cordelia.save()
        hamlet.date_joined = timezone_now() - timedelta(days=9)
        hamlet.save()
        do_edit_message_assert_error(
            id_, "C", "You don't have permission to edit this message", "cordelia"
        )
        # User who sent the message but is not a full member cannot edit
        # the topic
        do_edit_message_assert_error(
            id_, "C", "You don't have permission to edit this message", "hamlet"
        )

        cordelia.date_joined = timezone_now() - timedelta(days=11)
        cordelia.save()
        hamlet.date_joined = timezone_now() - timedelta(days=11)
        hamlet.save()
        do_edit_message_assert_success(id_, "C", "cordelia")
        do_edit_message_assert_success(id_, "CD", "hamlet")

        # only moderators can edit topic of a message
        set_message_editing_params(True, "unlimited", EditTopicPolicyEnum.MODERATORS_ONLY)
        do_edit_message_assert_error(
            id_, "D", "You don't have permission to edit this message", "cordelia"
        )
        # even user who sent the message but is not a moderator cannot edit the topic.
        do_edit_message_assert_error(
            id_, "D", "You don't have permission to edit this message", "hamlet"
        )
        do_edit_message_assert_success(id_, "D", "shiva")

        # only admins can edit the topics of messages
        set_message_editing_params(True, "unlimited", EditTopicPolicyEnum.ADMINS_ONLY)
        do_edit_message_assert_error(
            id_, "E", "You don't have permission to edit this message", "shiva"
        )
        do_edit_message_assert_success(id_, "E", "iago")

        # even owners and admins cannot edit the topics of messages
        set_message_editing_params(True, "unlimited", EditTopicPolicyEnum.NOBODY)
        do_edit_message_assert_error(
            id_, "H", "You don't have permission to edit this message", "desdemona"
        )
        do_edit_message_assert_error(
            id_, "H", "You don't have permission to edit this message", "iago"
        )

        # users can edit topics even if allow_message_editing is False
        set_message_editing_params(False, "unlimited", EditTopicPolicyEnum.EVERYONE)
        do_edit_message_assert_success(id_, "D", "cordelia")

        # non-admin users cannot edit topics sent > 1 week ago including
        # sender of the message.
        message.date_sent -= timedelta(seconds=604900)
        message.save()
        set_message_editing_params(True, "unlimited", EditTopicPolicyEnum.EVERYONE)
        do_edit_message_assert_success(id_, "E", "iago")
        do_edit_message_assert_success(id_, "F", "shiva")
        do_edit_message_assert_error(
            id_, "G", "The time limit for editing this message's topic has passed.", "cordelia"
        )
        do_edit_message_assert_error(
            id_, "G", "The time limit for editing this message's topic has passed.", "hamlet"
        )

        # topic edit permissions apply on "no topic" messages as well
        message.set_topic_name("(no topic)")
        message.save()
        do_edit_message_assert_error(
            id_, "G", "The time limit for editing this message's topic has passed.", "cordelia"
        )

        # set the topic edit limit to two weeks
        do_set_realm_property(
            hamlet.realm,
            "move_messages_within_stream_limit_seconds",
            604800 * 2,
            acting_user=None,
        )
        do_edit_message_assert_success(id_, "G", "cordelia")
        do_edit_message_assert_success(id_, "H", "hamlet")
@mock.patch("zerver.actions.message_edit.send_event_on_commit")
|
2024-03-06 18:10:25 +01:00
|
|
|
def test_topic_wildcard_mention_in_followed_topic(
|
|
|
|
self, mock_send_event: mock.MagicMock
|
|
|
|
) -> None:
|
2020-07-03 11:09:37 +02:00
|
|
|
stream_name = "Macbeth"
|
|
|
|
hamlet = self.example_user("hamlet")
|
|
|
|
cordelia = self.example_user("cordelia")
|
|
|
|
self.make_stream(stream_name, history_public_to_subscribers=True)
|
|
|
|
self.subscribe(hamlet, stream_name)
|
|
|
|
self.subscribe(cordelia, stream_name)
|
2024-03-06 18:10:25 +01:00
|
|
|
self.login_user(hamlet)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2024-03-06 18:10:25 +01:00
|
|
|
do_set_user_topic_visibility_policy(
|
|
|
|
user_profile=hamlet,
|
|
|
|
stream=get_stream(stream_name, cordelia.realm),
|
|
|
|
topic_name="test",
|
|
|
|
visibility_policy=UserTopic.VisibilityPolicy.FOLLOWED,
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2024-03-06 18:10:25 +01:00
|
|
|
message_id = self.send_stream_message(hamlet, stream_name, "Hello everyone")
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2024-03-06 18:10:25 +01:00
|
|
|
users_to_be_notified = sorted(
|
|
|
|
[
|
|
|
|
{
|
|
|
|
"id": hamlet.id,
|
|
|
|
"flags": ["read", "topic_wildcard_mentioned"],
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"id": cordelia.id,
|
|
|
|
"flags": [],
|
|
|
|
},
|
|
|
|
],
|
|
|
|
key=itemgetter("id"),
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2024-03-06 18:10:25 +01:00
|
|
|
result = self.client_patch(
|
|
|
|
f"/json/messages/{message_id}",
|
|
|
|
{
|
|
|
|
"content": "Hello @**topic**",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
self.assert_json_success(result)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2024-03-06 18:10:25 +01:00
|
|
|
# Extract the send_event call where event type is 'update_message'.
|
|
|
|
# Here we assert 'topic_wildcard_mention_in_followed_topic_user_ids'
|
|
|
|
# has been set properly.
|
|
|
|
called = False
|
|
|
|
for call_args in mock_send_event.call_args_list:
|
|
|
|
(arg_realm, arg_event, arg_notified_users) = call_args[0]
|
|
|
|
if arg_event["type"] == "update_message":
|
|
|
|
self.assertEqual(arg_event["type"], "update_message")
|
|
|
|
self.assertEqual(
|
|
|
|
arg_event["topic_wildcard_mention_in_followed_topic_user_ids"], [hamlet.id]
|
|
|
|
)
|
|
|
|
self.assertEqual(
|
|
|
|
sorted(arg_notified_users, key=itemgetter("id")), users_to_be_notified
|
|
|
|
)
|
|
|
|
called = True
|
|
|
|
self.assertTrue(called)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2024-05-24 14:08:41 +02:00
|
|
|
@mock.patch("zerver.actions.message_edit.send_event_on_commit")
|
2024-03-06 18:10:25 +01:00
|
|
|
def test_stream_wildcard_mention_in_followed_topic(
|
|
|
|
self, mock_send_event: mock.MagicMock
|
|
|
|
) -> None:
|
|
|
|
stream_name = "Macbeth"
|
2022-03-18 01:19:16 +01:00
|
|
|
hamlet = self.example_user("hamlet")
|
|
|
|
cordelia = self.example_user("cordelia")
|
2024-03-06 18:10:25 +01:00
|
|
|
self.make_stream(stream_name, history_public_to_subscribers=True)
|
2022-03-18 01:19:16 +01:00
|
|
|
self.subscribe(hamlet, stream_name)
|
|
|
|
self.subscribe(cordelia, stream_name)
|
2024-03-06 18:10:25 +01:00
|
|
|
self.login_user(hamlet)
|
2022-03-25 00:57:32 +01:00
|
|
|
|
2024-03-06 18:10:25 +01:00
|
|
|
do_set_user_topic_visibility_policy(
|
2022-03-18 01:19:16 +01:00
|
|
|
user_profile=hamlet,
|
2024-03-06 18:10:25 +01:00
|
|
|
stream=get_stream(stream_name, cordelia.realm),
|
|
|
|
topic_name="test",
|
|
|
|
visibility_policy=UserTopic.VisibilityPolicy.FOLLOWED,
|
2022-03-21 15:07:45 +01:00
|
|
|
)
|
2024-03-06 18:10:25 +01:00
|
|
|
message_id = self.send_stream_message(hamlet, stream_name, "Hello everyone")
|
2022-03-21 15:07:45 +01:00
|
|
|
|
2024-03-06 18:10:25 +01:00
|
|
|
users_to_be_notified = sorted(
|
|
|
|
[
|
|
|
|
{
|
|
|
|
"id": hamlet.id,
|
|
|
|
"flags": ["read", "stream_wildcard_mentioned"],
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"id": cordelia.id,
|
|
|
|
"flags": ["stream_wildcard_mentioned"],
|
|
|
|
},
|
|
|
|
],
|
|
|
|
key=itemgetter("id"),
|
2022-03-21 15:07:45 +01:00
|
|
|
)
|
2024-03-06 18:10:25 +01:00
|
|
|
result = self.client_patch(
|
|
|
|
f"/json/messages/{message_id}",
|
|
|
|
{
|
|
|
|
"content": "Hello @**all**",
|
|
|
|
},
|
2022-03-24 23:51:31 +01:00
|
|
|
)
|
2024-03-06 18:10:25 +01:00
|
|
|
self.assert_json_success(result)
|
2023-03-17 15:09:50 +01:00
|
|
|
|
2024-03-06 18:10:25 +01:00
|
|
|
# Extract the send_event call where event type is 'update_message'.
|
|
|
|
# Here we assert 'stream_wildcard_mention_in_followed_topic_user_ids'
|
|
|
|
# has been set properly.
|
|
|
|
called = False
|
|
|
|
for call_args in mock_send_event.call_args_list:
|
|
|
|
(arg_realm, arg_event, arg_notified_users) = call_args[0]
|
|
|
|
if arg_event["type"] == "update_message":
|
|
|
|
self.assertEqual(arg_event["type"], "update_message")
|
|
|
|
self.assertEqual(
|
|
|
|
arg_event["stream_wildcard_mention_in_followed_topic_user_ids"], [hamlet.id]
|
2023-03-17 15:09:50 +01:00
|
|
|
)
|
2024-03-06 18:10:25 +01:00
|
|
|
self.assertEqual(
|
|
|
|
sorted(arg_notified_users, key=itemgetter("id")), users_to_be_notified
|
2023-03-17 15:09:50 +01:00
|
|
|
)
|
2024-03-06 18:10:25 +01:00
|
|
|
called = True
|
|
|
|
self.assertTrue(called)
|
2023-03-17 15:09:50 +01:00
|
|
|
|
2024-05-24 14:08:41 +02:00
|
|
|
@mock.patch("zerver.actions.message_edit.send_event_on_commit")
|
2024-03-06 18:10:25 +01:00
|
|
|
def test_topic_wildcard_mention(self, mock_send_event: mock.MagicMock) -> None:
|
|
|
|
stream_name = "Macbeth"
|
|
|
|
hamlet = self.example_user("hamlet")
|
|
|
|
cordelia = self.example_user("cordelia")
|
|
|
|
self.make_stream(stream_name, history_public_to_subscribers=True)
|
2023-03-17 15:09:50 +01:00
|
|
|
self.subscribe(hamlet, stream_name)
|
|
|
|
self.subscribe(cordelia, stream_name)
|
2024-03-06 18:10:25 +01:00
|
|
|
self.login_user(hamlet)
|
|
|
|
message_id = self.send_stream_message(hamlet, stream_name, "Hello everyone")
|
2023-03-17 15:09:50 +01:00
|
|
|
|
2024-03-06 18:10:25 +01:00
|
|
|
users_to_be_notified = sorted(
|
|
|
|
[
|
|
|
|
{
|
|
|
|
"id": hamlet.id,
|
|
|
|
"flags": ["read", "topic_wildcard_mentioned"],
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"id": cordelia.id,
|
|
|
|
"flags": [],
|
|
|
|
},
|
|
|
|
],
|
|
|
|
key=itemgetter("id"),
|
2023-08-16 06:16:49 +02:00
|
|
|
)
|
2023-06-07 19:19:33 +02:00
|
|
|
result = self.client_patch(
|
|
|
|
f"/json/messages/{message_id}",
|
|
|
|
{
|
|
|
|
"content": "Hello @**topic**",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
# Extract the send_event call where event type is 'update_message'.
|
|
|
|
# Here we assert topic_wildcard_mention_user_ids has been set properly.
|
|
|
|
called = False
|
|
|
|
for call_args in mock_send_event.call_args_list:
|
|
|
|
(arg_realm, arg_event, arg_notified_users) = call_args[0]
|
|
|
|
if arg_event["type"] == "update_message":
|
|
|
|
self.assertEqual(arg_event["type"], "update_message")
|
|
|
|
self.assertEqual(arg_event["topic_wildcard_mention_user_ids"], [hamlet.id])
|
|
|
|
self.assertEqual(
|
|
|
|
sorted(arg_notified_users, key=itemgetter("id")), users_to_be_notified
|
|
|
|
)
|
|
|
|
called = True
|
|
|
|
self.assertTrue(called)
|
|
|
|
|
|
|
|
def test_topic_wildcard_mention_restrictions_when_editing(self) -> None:
|
|
|
|
cordelia = self.example_user("cordelia")
|
|
|
|
shiva = self.example_user("shiva")
|
|
|
|
self.login("cordelia")
|
|
|
|
stream_name = "Macbeth"
|
|
|
|
self.make_stream(stream_name, history_public_to_subscribers=True)
|
|
|
|
self.subscribe(cordelia, stream_name)
|
|
|
|
self.subscribe(shiva, stream_name)
|
|
|
|
message_id = self.send_stream_message(cordelia, stream_name, "Hello everyone")
|
|
|
|
|
|
|
|
realm = cordelia.realm
|
|
|
|
do_set_realm_property(
|
|
|
|
realm,
|
|
|
|
"wildcard_mention_policy",
|
2024-05-22 11:43:10 +02:00
|
|
|
WildcardMentionPolicyEnum.MODERATORS,
|
2023-06-07 19:19:33 +02:00
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
|
2023-11-21 10:39:13 +01:00
|
|
|
# Less than 'Realm.WILDCARD_MENTION_THRESHOLD' participants
|
|
|
|
participants_user_ids = set(range(1, 10))
|
|
|
|
with mock.patch(
|
|
|
|
"zerver.actions.message_edit.participants_for_topic", return_value=participants_user_ids
|
|
|
|
):
|
2023-06-07 19:19:33 +02:00
|
|
|
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(message_id),
|
|
|
|
{
|
|
|
|
"content": "Hello @**topic**",
|
|
|
|
},
|
|
|
|
)
|
2023-11-21 10:39:13 +01:00
|
|
|
self.assert_json_success(result)
|
2023-06-07 19:19:33 +02:00
|
|
|
|
2023-11-21 10:39:13 +01:00
|
|
|
# More than 'Realm.WILDCARD_MENTION_THRESHOLD' participants.
|
|
|
|
participants_user_ids = set(range(1, 20))
|
|
|
|
with mock.patch(
|
|
|
|
"zerver.actions.message_edit.participants_for_topic", return_value=participants_user_ids
|
|
|
|
):
|
2023-06-07 19:19:33 +02:00
|
|
|
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(message_id),
|
|
|
|
{
|
|
|
|
"content": "Hello @**topic**",
|
|
|
|
},
|
|
|
|
)
|
2023-11-21 10:39:13 +01:00
|
|
|
self.assert_json_error(
|
|
|
|
result, "You do not have permission to use topic wildcard mentions in this topic."
|
|
|
|
)
|
2023-06-07 19:19:33 +02:00
|
|
|
|
2023-11-21 10:39:13 +01:00
|
|
|
# Shiva is moderator
|
2023-06-07 19:19:33 +02:00
|
|
|
self.login("shiva")
|
|
|
|
message_id = self.send_stream_message(shiva, stream_name, "Hi everyone")
|
2023-11-21 10:39:13 +01:00
|
|
|
with mock.patch(
|
|
|
|
"zerver.actions.message_edit.participants_for_topic", return_value=participants_user_ids
|
|
|
|
):
|
2023-06-07 19:19:33 +02:00
|
|
|
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(message_id),
|
|
|
|
{
|
|
|
|
"content": "Hello @**topic**",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
2024-05-24 14:08:41 +02:00
|
|
|
@mock.patch("zerver.actions.message_edit.send_event_on_commit")
|
2023-06-03 16:51:38 +02:00
|
|
|
def test_stream_wildcard_mention(self, mock_send_event: mock.MagicMock) -> None:
|
2020-07-03 11:09:37 +02:00
|
|
|
stream_name = "Macbeth"
|
|
|
|
hamlet = self.example_user("hamlet")
|
|
|
|
cordelia = self.example_user("cordelia")
|
|
|
|
self.make_stream(stream_name, history_public_to_subscribers=True)
|
|
|
|
self.subscribe(hamlet, stream_name)
|
|
|
|
self.subscribe(cordelia, stream_name)
|
|
|
|
self.login_user(hamlet)
|
|
|
|
message_id = self.send_stream_message(hamlet, stream_name, "Hello everyone")
|
|
|
|
|
2023-09-30 12:40:39 +02:00
|
|
|
users_to_be_notified = sorted(
|
|
|
|
[
|
|
|
|
{
|
|
|
|
"id": hamlet.id,
|
2023-11-03 15:20:44 +01:00
|
|
|
"flags": ["read", "stream_wildcard_mentioned"],
|
2023-09-30 12:40:39 +02:00
|
|
|
},
|
|
|
|
{
|
|
|
|
"id": cordelia.id,
|
2023-11-03 15:20:44 +01:00
|
|
|
"flags": ["stream_wildcard_mentioned"],
|
2023-09-30 12:40:39 +02:00
|
|
|
},
|
|
|
|
],
|
|
|
|
key=itemgetter("id"),
|
|
|
|
)
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{message_id}",
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"content": "Hello @**everyone**",
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
# Extract the send_event call where event type is 'update_message'.
|
2023-06-03 16:51:38 +02:00
|
|
|
# Here we assert 'stream_wildcard_mention_user_ids' has been set properly.
|
2020-07-03 11:09:37 +02:00
|
|
|
called = False
|
|
|
|
for call_args in mock_send_event.call_args_list:
|
|
|
|
(arg_realm, arg_event, arg_notified_users) = call_args[0]
|
2021-02-12 08:20:45 +01:00
|
|
|
if arg_event["type"] == "update_message":
|
|
|
|
self.assertEqual(arg_event["type"], "update_message")
|
2023-06-03 16:51:38 +02:00
|
|
|
self.assertEqual(
|
|
|
|
arg_event["stream_wildcard_mention_user_ids"], [cordelia.id, hamlet.id]
|
|
|
|
)
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
|
|
|
sorted(arg_notified_users, key=itemgetter("id")), users_to_be_notified
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
called = True
|
|
|
|
self.assertTrue(called)
|
|
|
|
|
2023-06-03 16:51:38 +02:00
|
|
|
def test_stream_wildcard_mention_restrictions_when_editing(self) -> None:
|
2021-12-06 18:40:30 +01:00
|
|
|
cordelia = self.example_user("cordelia")
|
|
|
|
shiva = self.example_user("shiva")
|
|
|
|
self.login("cordelia")
|
|
|
|
stream_name = "Macbeth"
|
|
|
|
self.make_stream(stream_name, history_public_to_subscribers=True)
|
|
|
|
self.subscribe(cordelia, stream_name)
|
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
|
|
|
self.subscribe(shiva, stream_name)
|
2021-12-06 18:40:30 +01:00
|
|
|
message_id = self.send_stream_message(cordelia, stream_name, "Hello everyone")
|
|
|
|
|
|
|
|
realm = cordelia.realm
|
|
|
|
do_set_realm_property(
|
|
|
|
realm,
|
|
|
|
"wildcard_mention_policy",
|
2024-05-22 11:43:10 +02:00
|
|
|
WildcardMentionPolicyEnum.MODERATORS,
|
2021-12-06 18:40:30 +01:00
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
|
|
|
|
with mock.patch("zerver.lib.message.num_subscribers_for_stream_id", return_value=17):
|
|
|
|
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(message_id),
|
|
|
|
{
|
|
|
|
"content": "Hello @**everyone**",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
self.assert_json_error(
|
2024-04-16 21:27:36 +02:00
|
|
|
result, "You do not have permission to use channel wildcard mentions in this channel."
|
2021-12-06 18:40:30 +01:00
|
|
|
)
|
|
|
|
|
|
|
|
with mock.patch("zerver.lib.message.num_subscribers_for_stream_id", return_value=14):
|
|
|
|
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(message_id),
|
|
|
|
{
|
|
|
|
"content": "Hello @**everyone**",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
self.login("shiva")
|
|
|
|
message_id = self.send_stream_message(shiva, stream_name, "Hi everyone")
|
|
|
|
with mock.patch("zerver.lib.message.num_subscribers_for_stream_id", return_value=17):
|
|
|
|
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(message_id),
|
|
|
|
{
|
|
|
|
"content": "Hello @**everyone**",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
2023-06-14 13:35:54 +02:00
|
|
|
    def test_user_group_mention_restrictions_while_editing(self) -> None:
        """Message edits must respect each group's can_mention_group setting:
        a non-silent @*group* mention is rejected unless the editor satisfies
        can_mention_group, while silent @_*group* mentions are always allowed.
        Covers direct members, subgroup members, moderators/admins, and
        anonymous (non-named) can_mention_group values."""
        iago = self.example_user("iago")
        shiva = self.example_user("shiva")
        cordelia = self.example_user("cordelia")
        othello = self.example_user("othello")
        self.subscribe(iago, "test_stream")
        self.subscribe(shiva, "test_stream")
        self.subscribe(othello, "test_stream")
        self.subscribe(cordelia, "test_stream")

        leadership = check_add_user_group(
            othello.realm, "leadership", [othello], acting_user=othello
        )
        support = check_add_user_group(othello.realm, "support", [othello], acting_user=othello)

        moderators_system_group = NamedUserGroup.objects.get(
            realm=iago.realm, name=SystemGroups.MODERATORS, is_system_group=True
        )

        # With the default can_mention_group, any user can mention 'leadership'.
        self.login("cordelia")
        msg_id = self.send_stream_message(cordelia, "test_stream", "Test message")
        content = "Edited test message @*leadership*"
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_success(result)

        # Restrict mentioning 'leadership' to the moderators system group.
        leadership.can_mention_group = moderators_system_group
        leadership.save()

        # Cordelia is not a moderator, so her edit is now rejected.
        msg_id = self.send_stream_message(cordelia, "test_stream", "Test message")
        content = "Edited test message @*leadership*"
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_error(
            result,
            f"You are not allowed to mention user group '{leadership.name}'.",
        )

        # The restriction does not apply on silent mention.
        msg_id = self.send_stream_message(cordelia, "test_stream", "Test message")
        content = "Edited test message @_*leadership*"
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_success(result)

        # Shiva (moderator) passes the moderators check.
        self.login("shiva")
        content = "Edited test message @*leadership*"
        msg_id = self.send_stream_message(shiva, "test_stream", "Test message")
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_success(result)

        # Iago (administrator) also passes — presumably via the system-group
        # hierarchy; the assertion below is what pins this behavior.
        self.login("iago")
        msg_id = self.send_stream_message(iago, "test_stream", "Test message")
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_success(result)

        # Make 'leadership' (now containing subgroup 'test' with Shiva) the
        # can_mention_group of 'support'.
        test = check_add_user_group(shiva.realm, "test", [shiva], acting_user=shiva)
        add_subgroups_to_user_group(leadership, [test], acting_user=None)
        support.can_mention_group = leadership
        support.save()

        # Iago is not in 'leadership', so he may not mention 'support'.
        # (msg_id still refers to Iago's message from above.)
        content = "Test mentioning user group @*support*"
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_error(
            result,
            f"You are not allowed to mention user group '{support.name}'.",
        )

        # Othello is a direct member of 'leadership'.
        msg_id = self.send_stream_message(othello, "test_stream", "Test message")
        self.login("othello")
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_success(result)

        # Shiva is a member via the 'test' subgroup.
        msg_id = self.send_stream_message(shiva, "test_stream", "Test message")
        self.login("shiva")
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_success(result)

        # Mentioning two groups requires permission for each of them.
        msg_id = self.send_stream_message(iago, "test_stream", "Test message")
        content = "Test mentioning user group @*support* @*leadership*"

        # Iago fails on 'support' (not in 'leadership').
        self.login("iago")
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_error(
            result,
            f"You are not allowed to mention user group '{support.name}'.",
        )

        # Othello fails on 'leadership' (not a moderator).
        msg_id = self.send_stream_message(othello, "test_stream", "Test message")
        self.login("othello")
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_error(
            result,
            f"You are not allowed to mention user group '{leadership.name}'.",
        )

        # Shiva satisfies both groups' settings.
        msg_id = self.send_stream_message(shiva, "test_stream", "Test message")
        self.login("shiva")
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_success(result)

        # Test all the cases when can_mention_group is not a named user group.
        content = "Test mentioning user group @*leadership*"
        user_group = self.create_or_update_anonymous_group_for_setting(
            [othello], [moderators_system_group]
        )
        leadership.can_mention_group = user_group
        leadership.save()

        # Othello is a direct member of the anonymous group.
        msg_id = self.send_stream_message(othello, "test_stream", "Test message")
        self.login("othello")
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_success(result)

        # Shiva is a member via the moderators subgroup.
        msg_id = self.send_stream_message(shiva, "test_stream", "Test message")
        self.login("shiva")
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_success(result)

        # Iago (administrator) is also permitted.
        msg_id = self.send_stream_message(iago, "test_stream", "Test message")
        self.login("iago")
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_success(result)

        # Cordelia is in neither the group nor its subgroups: rejected.
        msg_id = self.send_stream_message(cordelia, "test_stream", "Test message")
        self.login("cordelia")
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_error(
            result, f"You are not allowed to mention user group '{leadership.name}'."
        )

        # ...but her silent mention still succeeds.
        content = "Test mentioning user group @_*leadership*"
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_success(result)
|
2024-07-28 03:21:33 +02:00
|
|
|
|
|
|
|
def test_remove_attachment_while_editing(self) -> None:
|
|
|
|
# Try editing a message and removing an linked attachment that's
|
|
|
|
# uploaded by us. Users should be able to detach their own attachments
|
|
|
|
CONST_UPLOAD_PATH_PREFIX = "/user_uploads/"
|
|
|
|
user_profile = self.example_user("hamlet")
|
|
|
|
file1 = self.create_attachment_helper(user_profile)
|
|
|
|
|
|
|
|
content = f"Init message [attachment1.txt]({file1})"
|
|
|
|
self.login("hamlet")
|
|
|
|
|
|
|
|
# Create two messages referencing the same attachment.
|
|
|
|
original_msg_id = self.send_stream_message(
|
|
|
|
user_profile,
|
|
|
|
"Denmark",
|
|
|
|
topic_name="editing",
|
|
|
|
content=content,
|
|
|
|
)
|
|
|
|
|
|
|
|
attachments = Attachment.objects.filter(messages__in=[original_msg_id])
|
|
|
|
self.assert_length(attachments, 1)
|
|
|
|
path_id_set = CONST_UPLOAD_PATH_PREFIX + attachments[0].path_id
|
|
|
|
self.assertEqual(path_id_set, file1)
|
|
|
|
|
|
|
|
msg_id = self.send_stream_message(
|
|
|
|
user_profile,
|
|
|
|
"Denmark",
|
|
|
|
topic_name="editing",
|
|
|
|
content=content,
|
|
|
|
)
|
|
|
|
|
|
|
|
attachments = Attachment.objects.filter(messages__in=[msg_id])
|
|
|
|
self.assert_length(attachments, 1)
|
|
|
|
path_id_set = CONST_UPLOAD_PATH_PREFIX + attachments[0].path_id
|
|
|
|
self.assertEqual(path_id_set, file1)
|
|
|
|
|
|
|
|
# Try editing first message and removing one reference to the attachment.
|
|
|
|
result = self.client_patch(
|
|
|
|
f"/json/messages/{original_msg_id}",
|
|
|
|
{
|
|
|
|
"content": "Try editing a message with an attachment",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
result_content = orjson.loads(result.content)
|
|
|
|
self.assertEqual(result_content["result"], "success")
|
|
|
|
self.assert_length(result_content["detached_uploads"], 0)
|
|
|
|
|
|
|
|
# Try editing second message, the only reference to the attachment now
|
|
|
|
result = self.client_patch(
|
|
|
|
f"/json/messages/{msg_id}",
|
|
|
|
{
|
|
|
|
"content": "Try editing a message with an attachment",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
result_content = orjson.loads(result.content)
|
|
|
|
self.assertEqual(result_content["result"], "success")
|
|
|
|
self.assert_length(result_content["detached_uploads"], 1)
|
|
|
|
actual_path_id_set = (
|
|
|
|
CONST_UPLOAD_PATH_PREFIX + result_content["detached_uploads"][0]["path_id"]
|
|
|
|
)
|
|
|
|
|
|
|
|
self.assertEqual(actual_path_id_set, file1)
|
|
|
|
|
|
|
|
result = self.client_patch(
|
|
|
|
f"/json/messages/{msg_id}",
|
|
|
|
{
|
|
|
|
"content": "Try editing a message with no attachments",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
result_content = orjson.loads(result.content)
|
|
|
|
self.assertEqual(result_content["result"], "success")
|
|
|
|
self.assert_length(result_content["detached_uploads"], 0)
|
|
|
|
|
|
|
|
    def test_remove_another_user_attachment_while_editing(self) -> None:
        """Try editing a message and removing a linked attachment that was
        uploaded by another user.  Users should not be able to detach another
        user's attachments, so 'detached_uploads' stays empty."""

        user_profile = self.example_user("hamlet")
        file1 = self.create_attachment_helper(user_profile)

        content = f"Init message [attachment1.txt]({file1})"

        # Send a message with the attachment as the uploading user (hamlet).
        msg_id = self.send_stream_message(
            user_profile,
            "Denmark",
            topic_name="editing",
            content=content,
        )
        self.check_message(msg_id, topic_name="editing", content=content)
        attachments = Attachment.objects.filter(messages__in=[msg_id])
        self.assert_length(attachments, 1)

        # Send a message (as iago) referencing the attachment uploaded by
        # another user (hamlet).
        self.login("iago")
        msg_id = self.send_stream_message(
            self.example_user("iago"),
            "Denmark",
            topic_name="editing",
            content=content,
        )
        self.check_message(msg_id, topic_name="editing", content=content)
        attachments = Attachment.objects.filter(messages__in=[msg_id])
        self.assert_length(attachments, 1)

        # Try editing the message and removing the reference to the
        # attachment; since iago did not upload it, nothing is detached.
        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "content": "Try editing a message with an attachment uploaded by another user",
            },
        )
        result_content = orjson.loads(result.content)
        self.assertEqual(result_content["result"], "success")
        self.assert_length(result_content["detached_uploads"], 0)
|
|
|
|
|
|
|
|
    def test_remove_another_user_deleted_attachment_while_editing(self) -> None:
        """Try editing a message and removing a linked attachment that has
        been uploaded — and whose own message was deleted — by the original
        user.  Users should not be able to detach another user's attachments,
        so 'detached_uploads' stays empty even after the uploader's message
        is gone."""

        user_profile = self.example_user("hamlet")
        file1 = self.create_attachment_helper(user_profile)

        content = f"Init message [attachment1.txt]({file1})"

        # Send messages with the attachment from both users.
        original_msg_id = self.send_stream_message(
            user_profile,
            "Denmark",
            topic_name="editing",
            content=content,
        )
        self.check_message(original_msg_id, topic_name="editing", content=content)
        attachments = Attachment.objects.filter(messages__in=[original_msg_id])
        self.assert_length(attachments, 1)

        msg_id = self.send_stream_message(
            self.example_user("iago"),
            "Denmark",
            topic_name="editing",
            content=content,
        )
        self.check_message(msg_id, topic_name="editing", content=content)
        attachments = Attachment.objects.filter(messages__in=[msg_id])
        self.assert_length(attachments, 1)

        # Delete the message reference from the attachment uploader.
        self.login("hamlet")
        result = self.client_delete(f"/json/messages/{original_msg_id}")
        result_content = orjson.loads(result.content)
        self.assertEqual(result_content["result"], "success")

        # Try editing the message and removing the reference of the now
        # deleted attachment; iago is not the uploader, so nothing is
        # detached for him.
        self.login("iago")
        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "content": "Try editing a message with an attachment uploaded by another user",
            },
        )
        result_content = orjson.loads(result.content)
        self.assertEqual(result_content["result"], "success")
        self.assert_length(result_content["detached_uploads"], 0)
|