2020-07-03 11:09:37 +02:00
|
|
|
import datetime
|
|
|
|
from operator import itemgetter
|
2022-06-08 04:52:09 +02:00
|
|
|
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
|
2020-07-03 11:09:37 +02:00
|
|
|
from unittest import mock
|
|
|
|
|
2020-08-07 01:09:47 +02:00
|
|
|
import orjson
|
2020-07-04 03:20:40 +02:00
|
|
|
from django.db import IntegrityError
|
2021-05-26 21:20:11 +02:00
|
|
|
from django.utils.timezone import now as timezone_now
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2022-04-14 23:55:52 +02:00
|
|
|
from zerver.actions.message_edit import (
|
2022-03-18 01:19:16 +01:00
|
|
|
check_update_message,
|
2021-06-03 15:46:13 +02:00
|
|
|
do_delete_messages,
|
2020-07-03 11:09:37 +02:00
|
|
|
do_update_message,
|
2022-04-19 01:12:26 +02:00
|
|
|
get_mentions_for_message_updates,
|
2020-07-03 11:09:37 +02:00
|
|
|
)
|
2022-04-14 23:55:52 +02:00
|
|
|
from zerver.actions.reactions import do_add_reaction
|
2022-04-14 23:57:15 +02:00
|
|
|
from zerver.actions.realm_settings import do_change_realm_plan_type, do_set_realm_property
|
2022-04-14 23:55:52 +02:00
|
|
|
from zerver.actions.streams import do_change_stream_post_policy, do_deactivate_stream
|
|
|
|
from zerver.actions.users import do_change_user_role
|
2020-07-03 11:09:37 +02:00
|
|
|
from zerver.lib.message import MessageDict, has_message_access, messages_for_ids
|
2022-04-14 23:58:37 +02:00
|
|
|
from zerver.lib.test_classes import ZulipTestCase, get_topic_messages
|
tests: Fix queries_captured to clear cache up front.
Before this change we were clearing the cache on
every SQL usage.
The code to do this was added in February 2017
in 6db4879f9c9fd6941d3aa2af6138ea75aa6675a6.
Now we clear the cache just one time, but before
the action/request under test.
Tests that want to count queries with a warm
cache now specify keep_cache_warm=True. Those
tests were particularly flawed before this change.
In general, the old code both over-counted and
under-counted queries.
It under-counted SQL usage for requests that were
able to pull some data out of a warm cache before
they did any SQL. Typically this would have bypassed
the initial query to get UserProfile, so you
will see several off-by-one fixes.
The old code over-counted SQL usage to the extent
that it's a rather extreme assumption that during
an action itself, the entries that you put into
the cache will get thrown away. And that's essentially
what the prior code simulated.
Now, it's still bad if an action keeps hitting the
cache for no reason, but it's not as bad as hitting
the database. There doesn't appear to be any evidence
of us doing something silly like fetching the same
data from the cache in a loop, but there are
opportunities to prevent second or third round
trips to the cache for the same object, if we
can re-structure the code so that the same caller
doesn't have two callees get the same data.
Note that for invites, we have some cache hits
that are due to the nature of how we serialize
data to our queue processor--we generally just
serialize ids, and then re-fetch objects when
we pop them off the queue.
2020-11-04 12:02:00 +01:00
|
|
|
from zerver.lib.test_helpers import cache_tries_captured, queries_captured
|
2022-03-02 02:25:25 +01:00
|
|
|
from zerver.lib.topic import RESOLVED_TOPIC_PREFIX, TOPIC_NAME
|
2022-03-18 01:19:16 +01:00
|
|
|
from zerver.lib.user_topics import (
|
|
|
|
get_topic_mutes,
|
|
|
|
get_users_muting_topic,
|
|
|
|
set_topic_mutes,
|
|
|
|
topic_is_muted,
|
|
|
|
)
|
2022-05-31 01:34:34 +02:00
|
|
|
from zerver.lib.utils import assert_is_not_none
|
2021-04-08 21:30:40 +02:00
|
|
|
from zerver.models import Message, Realm, Stream, UserMessage, UserProfile, get_realm, get_stream
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2022-06-08 04:52:09 +02:00
|
|
|
if TYPE_CHECKING:
|
|
|
|
from django.test.client import _MonkeyPatchedWSGIResponse as TestHttpResponse
|
|
|
|
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-05-26 11:46:31 +02:00
|
|
|
class EditMessageTestCase(ZulipTestCase):
    """Base class for message-edit tests: provides helpers to verify an
    edited message in both the database and the message cache, plus a
    fixture builder for topic-move tests."""

    def check_topic(self, msg_id: int, topic_name: str) -> None:
        """Assert that the message stored in the DB has the given topic."""
        msg = Message.objects.get(id=msg_id)
        self.assertEqual(msg.topic_name(), topic_name)

    def check_message(self, msg_id: int, topic_name: str, content: str) -> None:
        """Verify a just-edited message against both the DB and the cache.

        We assume our caller just edited a message.

        Next, we will make sure we properly cached the messages. We still have
        to do a query to hydrate recipient info, but we won't need to hit the
        zerver_message table.
        """
        # Make sure we saved the message correctly to the DB.
        msg = Message.objects.get(id=msg_id)
        self.assertEqual(msg.topic_name(), topic_name)
        self.assertEqual(msg.content, content)

        with queries_captured(keep_cache_warm=True) as queries:
            (fetch_message_dict,) = messages_for_ids(
                message_ids=[msg.id],
                user_message_flags={msg_id: []},
                search_fields={},
                apply_markdown=False,
                client_gravatar=False,
                allow_edit_history=True,
            )

        # Exactly one query is allowed (recipient hydration), and it must
        # not touch the zerver_message table.
        self.assert_length(queries, 1)
        for query in queries:
            self.assertNotIn("message", query["sql"])

        self.assertEqual(
            fetch_message_dict[TOPIC_NAME],
            msg.topic_name(),
        )
        self.assertEqual(
            fetch_message_dict["content"],
            msg.content,
        )
        self.assertEqual(
            fetch_message_dict["sender_id"],
            msg.sender_id,
        )

        if msg.edit_history:
            self.assertEqual(
                fetch_message_dict["edit_history"],
                orjson.loads(msg.edit_history),
            )

    def prepare_move_topics(
        self,
        user_email: str,
        old_stream: str,
        new_stream: str,
        topic: str,
        language: Optional[str] = None,
    ) -> Tuple[UserProfile, Stream, Stream, int, int]:
        """Create two streams, subscribe *user_email* to both, and post
        three messages under *topic* in the old stream.

        Returns (user_profile, old_stream, new_stream, first_msg_id,
        second_msg_id); the third message's id is intentionally discarded.
        """
        user_profile = self.example_user(user_email)
        if language is not None:
            user_profile.default_language = language
            user_profile.save(update_fields=["default_language"])

        self.login(user_email)
        stream = self.make_stream(old_stream)
        stream_to = self.make_stream(new_stream)
        self.subscribe(user_profile, stream.name)
        self.subscribe(user_profile, stream_to.name)
        msg_id = self.send_stream_message(
            user_profile, stream.name, topic_name=topic, content="First"
        )
        msg_id_lt = self.send_stream_message(
            user_profile, stream.name, topic_name=topic, content="Second"
        )

        self.send_stream_message(user_profile, stream.name, topic_name=topic, content="third")

        return (user_profile, stream, stream_to, msg_id, msg_id_lt)
2021-05-27 06:24:06 +02:00
|
|
|
class EditMessagePayloadTest(EditMessageTestCase):
    """Tests for request-payload validation of the message-edit endpoint."""

    def test_edit_message_no_changes(self) -> None:
        """An empty PATCH payload is rejected."""
        self.login("hamlet")
        msg_id = self.send_stream_message(
            self.example_user("hamlet"), "Denmark", topic_name="editing", content="before edit"
        )
        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {},
        )
        self.assert_json_error(result, "Nothing to change")

    def test_move_message_cant_move_private_message(self) -> None:
        """A private message cannot be given a stream_id."""
        hamlet = self.example_user("hamlet")
        self.login("hamlet")
        cordelia = self.example_user("cordelia")
        msg_id = self.send_personal_message(hamlet, cordelia)

        verona = get_stream("Verona", hamlet.realm)

        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "stream_id": verona.id,
            },
        )

        self.assert_json_error(result, "Private messages cannot be moved to streams.")

    def test_private_message_edit_topic(self) -> None:
        """A private message cannot be given a topic."""
        hamlet = self.example_user("hamlet")
        self.login("hamlet")
        cordelia = self.example_user("cordelia")
        msg_id = self.send_personal_message(hamlet, cordelia)

        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "topic": "Should not exist",
            },
        )

        self.assert_json_error(result, "Private messages cannot have topics.")

    def test_propagate_invalid(self) -> None:
        """Invalid propagate_mode values are rejected and the message is unchanged."""
        self.login("hamlet")
        id1 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic1")

        result = self.client_patch(
            f"/json/messages/{id1}",
            {
                "topic": "edited",
                "propagate_mode": "invalid",
            },
        )
        self.assert_json_error(result, "Invalid propagate_mode")
        self.check_topic(id1, topic_name="topic1")

        # propagate_mode other than change_one is only valid for topic edits.
        result = self.client_patch(
            f"/json/messages/{id1}",
            {
                "content": "edited",
                "propagate_mode": "change_all",
            },
        )
        self.assert_json_error(result, "Invalid propagate_mode without topic edit")
        self.check_topic(id1, topic_name="topic1")

    def test_edit_message_no_topic(self) -> None:
        """A whitespace-only topic is rejected as empty."""
        self.login("hamlet")
        msg_id = self.send_stream_message(
            self.example_user("hamlet"), "Denmark", topic_name="editing", content="before edit"
        )
        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "topic": " ",
            },
        )
        self.assert_json_error(result, "Topic can't be empty!")

    def test_edit_message_invalid_topic(self) -> None:
        """A topic containing a forbidden character (newline) is rejected."""
        self.login("hamlet")
        msg_id = self.send_stream_message(
            self.example_user("hamlet"), "Denmark", topic_name="editing", content="before edit"
        )
        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "topic": "editing\nfun",
            },
        )
        self.assert_json_error(result, "Invalid character in topic, at position 8!")

    def test_move_message_to_stream_with_content(self) -> None:
        """Moving a message to another stream cannot be combined with a
        content edit; the messages stay in the original stream."""
        (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
            "iago", "test move stream", "new stream", "test"
        )

        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "stream_id": new_stream.id,
                "propagate_mode": "change_all",
                "content": "Not allowed",
            },
        )
        self.assert_json_error(result, "Cannot change message content while changing stream")

        messages = get_topic_messages(user_profile, old_stream, "test")
        self.assert_length(messages, 3)

        messages = get_topic_messages(user_profile, new_stream, "test")
        self.assert_length(messages, 0)

    # Right now, we prevent users from editing widgets.
    def test_edit_submessage(self) -> None:
        self.login("hamlet")
        msg_id = self.send_stream_message(
            self.example_user("hamlet"),
            "Denmark",
            topic_name="editing",
            content="/poll Games?\nYES\nNO",
        )
        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "content": "/poll Games?\nYES\nNO\nMaybe",
            },
        )
        self.assert_json_error(result, "Widgets cannot be edited.")
2021-05-26 11:46:31 +02:00
|
|
|
class EditMessageTest(EditMessageTestCase):
|
2020-07-03 11:09:37 +02:00
|
|
|
    def test_query_count_on_to_dict_uncached(self) -> None:
        """Pin the SQL query counts of MessageDict.to_dict_uncached, with
        and without a pre-supplied realm_id."""
        # `to_dict_uncached` method is used by the mechanisms
        # tested in this class. Hence, its performance is tested here.
        # Generate 3 messages (from a human, a second human, and a bot).
        user = self.example_user("hamlet")
        realm = user.realm
        self.login_user(user)
        stream_name = "public_stream"
        self.subscribe(user, stream_name)
        message_ids = []
        message_ids.append(self.send_stream_message(user, stream_name, "Message one"))
        user_2 = self.example_user("cordelia")
        self.subscribe(user_2, stream_name)
        message_ids.append(self.send_stream_message(user_2, stream_name, "Message two"))
        self.subscribe(self.notification_bot(realm), stream_name)
        message_ids.append(
            self.send_stream_message(self.notification_bot(realm), stream_name, "Message three")
        )
        messages = [
            Message.objects.select_related().get(id=message_id) for message_id in message_ids
        ]

        # Check number of queries performed
        with queries_captured() as queries:
            MessageDict.to_dict_uncached(messages)
        # 1 query for realm_id per message = 3
        # 1 query each for reactions & submessage for all messages = 2
        self.assert_length(queries, 5)

        realm_id = 2  # Fetched from stream object
        # Check number of queries performed with realm_id
        # (passing realm_id skips the per-message realm lookups above).
        with queries_captured() as queries:
            MessageDict.to_dict_uncached(messages, realm_id)
        # 1 query each for reactions & submessage for all messages = 2
        self.assert_length(queries, 2)
|
|
|
def test_save_message(self) -> None:
|
|
|
|
"""This is also tested by a client test, but here we can verify
|
|
|
|
the cache against the database"""
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2021-02-12 08:19:30 +01:00
|
|
|
msg_id = self.send_stream_message(
|
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
|
|
|
self.example_user("hamlet"), "Denmark", topic_name="editing", content="before edit"
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
result = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{msg_id}",
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"content": "after edit",
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_success(result)
|
|
|
|
self.check_message(msg_id, topic_name="editing", content="after edit")
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{msg_id}",
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"topic": "edited",
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_success(result)
|
|
|
|
self.check_topic(msg_id, topic_name="edited")
|
|
|
|
|
2022-03-02 05:33:20 +01:00
|
|
|
def test_fetch_message_from_id(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2020-07-03 11:09:37 +02:00
|
|
|
msg_id = self.send_personal_message(
|
|
|
|
from_user=self.example_user("hamlet"),
|
|
|
|
to_user=self.example_user("cordelia"),
|
2022-03-02 05:33:20 +01:00
|
|
|
content="Personal message",
|
|
|
|
)
|
|
|
|
result = self.client_get("/json/messages/" + str(msg_id))
|
2022-06-07 01:37:01 +02:00
|
|
|
response_dict = self.assert_json_success(result)
|
|
|
|
self.assertEqual(response_dict["raw_content"], "Personal message")
|
|
|
|
self.assertEqual(response_dict["message"]["id"], msg_id)
|
|
|
|
self.assertEqual(response_dict["message"]["flags"], [])
|
2022-03-02 05:33:20 +01:00
|
|
|
|
2022-04-28 05:05:04 +02:00
|
|
|
# Send message to web-public stream where hamlet is not subscribed.
|
2022-03-02 05:33:20 +01:00
|
|
|
# This will test case of user having no `UserMessage` but having access
|
|
|
|
# to message.
|
|
|
|
web_public_stream = self.make_stream("web-public-stream", is_web_public=True)
|
|
|
|
self.subscribe(self.example_user("cordelia"), web_public_stream.name)
|
|
|
|
web_public_stream_msg_id = self.send_stream_message(
|
|
|
|
self.example_user("cordelia"), web_public_stream.name, content="web-public message"
|
|
|
|
)
|
|
|
|
result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
|
2022-06-07 01:37:01 +02:00
|
|
|
response_dict = self.assert_json_success(result)
|
|
|
|
self.assertEqual(response_dict["raw_content"], "web-public message")
|
|
|
|
self.assertEqual(response_dict["message"]["id"], web_public_stream_msg_id)
|
|
|
|
self.assertEqual(response_dict["message"]["flags"], ["read", "historical"])
|
2022-03-02 05:33:20 +01:00
|
|
|
|
2022-04-28 05:05:04 +02:00
|
|
|
# Spectator should be able to fetch message in web-public stream.
|
2022-03-02 05:33:20 +01:00
|
|
|
self.logout()
|
|
|
|
result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
|
2022-06-07 01:37:01 +02:00
|
|
|
response_dict = self.assert_json_success(result)
|
|
|
|
self.assertEqual(response_dict["raw_content"], "web-public message")
|
|
|
|
self.assertEqual(response_dict["message"]["id"], web_public_stream_msg_id)
|
2022-03-02 05:33:20 +01:00
|
|
|
|
|
|
|
# Verify default is apply_markdown=True
|
2022-06-07 01:37:01 +02:00
|
|
|
self.assertEqual(response_dict["message"]["content"], "<p>web-public message</p>")
|
2022-03-02 05:33:20 +01:00
|
|
|
|
|
|
|
# Verify apply_markdown=False works correctly.
|
|
|
|
result = self.client_get(
|
|
|
|
"/json/messages/" + str(web_public_stream_msg_id), {"apply_markdown": "false"}
|
2020-07-03 11:09:37 +02:00
|
|
|
)
|
2022-06-07 01:37:01 +02:00
|
|
|
response_dict = self.assert_json_success(result)
|
|
|
|
self.assertEqual(response_dict["raw_content"], "web-public message")
|
|
|
|
self.assertEqual(response_dict["message"]["content"], "web-public message")
|
2022-03-02 05:33:20 +01:00
|
|
|
|
|
|
|
with self.settings(WEB_PUBLIC_STREAMS_ENABLED=False):
|
|
|
|
result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
|
|
|
|
self.assert_json_error(
|
|
|
|
result, "Not logged in: API authentication or user session required", status_code=401
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
# Test error cases
|
2022-03-02 05:33:20 +01:00
|
|
|
self.login("hamlet")
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_get("/json/messages/999999")
|
|
|
|
self.assert_json_error(result, "Invalid message(s)")
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("cordelia")
|
2021-02-03 14:07:36 +01:00
|
|
|
result = self.client_get(f"/json/messages/{msg_id}")
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_success(result)
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("othello")
|
2021-02-03 14:07:36 +01:00
|
|
|
result = self.client_get(f"/json/messages/{msg_id}")
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assert_json_error(result, "Invalid message(s)")
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-09-20 10:34:10 +02:00
|
|
|
    def test_fetch_raw_message_spectator(self) -> None:
        """Verify spectator (logged-out) access rules for fetching a single message.

        Covers: the WEB_PUBLIC_STREAMS_ENABLED server setting, the
        enable_spectator_access realm setting, realm plan types
        (SELF_HOSTED default, LIMITED, STANDARD_FREE), and rejection of
        private messages, non-web-public streams, invalid message IDs,
        and deactivated streams.
        """
        user_profile = self.example_user("iago")
        self.login("iago")
        web_public_stream = self.make_stream("web-public-stream", is_web_public=True)
        self.subscribe(user_profile, web_public_stream.name)

        web_public_stream_msg_id = self.send_stream_message(
            user_profile, web_public_stream.name, content="web-public message"
        )

        non_web_public_stream = self.make_stream("non-web-public-stream")
        self.subscribe(user_profile, non_web_public_stream.name)
        non_web_public_stream_msg_id = self.send_stream_message(
            user_profile, non_web_public_stream.name, content="non-web-public message"
        )

        # Generate a private message to use in verification.
        private_message_id = self.send_personal_message(user_profile, user_profile)

        # An ID guaranteed not to exist yet in this test database.
        invalid_message_id = private_message_id + 1000

        # All following requests are made as an unauthenticated spectator.
        self.logout()

        # Confirm WEB_PUBLIC_STREAMS_ENABLED is enforced.
        with self.settings(WEB_PUBLIC_STREAMS_ENABLED=False):
            result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
            self.assert_json_error(
                result, "Not logged in: API authentication or user session required", 401
            )

        # Disabling spectator access at the realm level blocks the fetch too.
        do_set_realm_property(
            user_profile.realm, "enable_spectator_access", False, acting_user=None
        )
        result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
        self.assert_json_error(
            result, "Not logged in: API authentication or user session required", 401
        )
        do_set_realm_property(user_profile.realm, "enable_spectator_access", True, acting_user=None)

        # Verify success with web-public stream and default SELF_HOSTED plan type.
        result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
        response_dict = self.assert_json_success(result)
        self.assertEqual(response_dict["raw_content"], "web-public message")
        self.assertEqual(response_dict["message"]["flags"], ["read"])

        # Verify LIMITED plan type does not allow web-public access.
        do_change_realm_plan_type(user_profile.realm, Realm.PLAN_TYPE_LIMITED, acting_user=None)
        result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
        self.assert_json_error(
            result, "Not logged in: API authentication or user session required", 401
        )

        # Verify works with STANDARD_FREE plan type too.
        do_change_realm_plan_type(
            user_profile.realm, Realm.PLAN_TYPE_STANDARD_FREE, acting_user=None
        )
        result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
        response_dict = self.assert_json_success(result)
        self.assertEqual(response_dict["raw_content"], "web-public message")

        # Verify private messages are rejected.
        result = self.client_get("/json/messages/" + str(private_message_id))
        self.assert_json_error(
            result, "Not logged in: API authentication or user session required", 401
        )

        # Verify an actual public stream is required.
        result = self.client_get("/json/messages/" + str(non_web_public_stream_msg_id))
        self.assert_json_error(
            result, "Not logged in: API authentication or user session required", 401
        )

        # Verify invalid message IDs are rejected with the same error message.
        result = self.client_get("/json/messages/" + str(invalid_message_id))
        self.assert_json_error(
            result, "Not logged in: API authentication or user session required", 401
        )

        # Verify deactivated streams are rejected. This may change in the future.
        do_deactivate_stream(web_public_stream, acting_user=None)
        result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
        self.assert_json_error(
            result, "Not logged in: API authentication or user session required", 401
        )
2020-07-03 11:09:37 +02:00
|
|
|
def test_fetch_raw_message_stream_wrong_realm(self) -> None:
|
|
|
|
user_profile = self.example_user("hamlet")
|
|
|
|
self.login_user(user_profile)
|
2021-02-12 08:20:45 +01:00
|
|
|
stream = self.make_stream("public_stream")
|
2020-07-03 11:09:37 +02:00
|
|
|
self.subscribe(user_profile, stream.name)
|
2021-02-12 08:19:30 +01:00
|
|
|
msg_id = self.send_stream_message(
|
|
|
|
user_profile, stream.name, topic_name="test", content="test"
|
|
|
|
)
|
2021-02-03 14:07:36 +01:00
|
|
|
result = self.client_get(f"/json/messages/{msg_id}")
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_success(result)
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
mit_user = self.mit_user("sipbtest")
|
2020-07-03 11:09:37 +02:00
|
|
|
self.login_user(mit_user)
|
2021-02-03 14:07:36 +01:00
|
|
|
result = self.client_get(f"/json/messages/{msg_id}", subdomain="zephyr")
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assert_json_error(result, "Invalid message(s)")
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
def test_fetch_raw_message_private_stream(self) -> None:
|
|
|
|
user_profile = self.example_user("hamlet")
|
|
|
|
self.login_user(user_profile)
|
2021-02-12 08:20:45 +01:00
|
|
|
stream = self.make_stream("private_stream", invite_only=True)
|
2020-07-03 11:09:37 +02:00
|
|
|
self.subscribe(user_profile, stream.name)
|
2021-02-12 08:19:30 +01:00
|
|
|
msg_id = self.send_stream_message(
|
|
|
|
user_profile, stream.name, topic_name="test", content="test"
|
|
|
|
)
|
2021-02-03 14:07:36 +01:00
|
|
|
result = self.client_get(f"/json/messages/{msg_id}")
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_success(result)
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("othello")
|
2021-02-03 14:07:36 +01:00
|
|
|
result = self.client_get(f"/json/messages/{msg_id}")
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assert_json_error(result, "Invalid message(s)")
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
def test_edit_message_no_permission(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2021-02-12 08:19:30 +01:00
|
|
|
msg_id = self.send_stream_message(
|
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
|
|
|
self.example_user("iago"), "Denmark", topic_name="editing", content="before edit"
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
result = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{msg_id}",
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"content": "content after edit",
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_error(result, "You don't have permission to edit this message")
|
|
|
|
|
|
|
|
def test_edit_message_no_content(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2021-02-12 08:19:30 +01:00
|
|
|
msg_id = self.send_stream_message(
|
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
|
|
|
self.example_user("hamlet"), "Denmark", topic_name="editing", content="before edit"
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
result = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{msg_id}",
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"content": " ",
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_success(result)
|
2021-02-12 08:20:45 +01:00
|
|
|
content = Message.objects.filter(id=msg_id).values_list("content", flat=True)[0]
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assertEqual(content, "(deleted)")
|
|
|
|
|
|
|
|
    def test_edit_message_history_disabled(self) -> None:
        """With allow_edit_history off, the history endpoint errors and fetched
        messages carry no edit_history field."""
        user_profile = self.example_user("hamlet")
        do_set_realm_property(user_profile.realm, "allow_edit_history", False, acting_user=None)
        self.login("hamlet")

        # Single-line edit
        msg_id_1 = self.send_stream_message(
            self.example_user("hamlet"),
            "Denmark",
            topic_name="editing",
            content="content before edit",
        )

        new_content_1 = "content after edit"
        result_1 = self.client_patch(
            f"/json/messages/{msg_id_1}",
            {
                "content": new_content_1,
            },
        )
        self.assert_json_success(result_1)

        # The dedicated history endpoint must refuse outright.
        result = self.client_get(f"/json/messages/{msg_id_1}/history")
        self.assert_json_error(result, "Message edit history is disabled in this organization")

        # Now verify that if we fetch the message directly, there's no
        # edit history data attached.
        messages_result = self.client_get(
            "/json/messages", {"anchor": msg_id_1, "num_before": 0, "num_after": 10}
        )
        self.assert_json_success(messages_result)
        json_messages = orjson.loads(messages_result.content)
        for msg in json_messages["messages"]:
            self.assertNotIn("edit_history", msg)
|
|
|
    def test_edit_message_history(self) -> None:
        """Edit history records pre/post rendered content and an HTML diff,
        for both a single-line edit and a multi-line edit."""
        self.login("hamlet")

        # Single-line edit
        msg_id_1 = self.send_stream_message(
            self.example_user("hamlet"),
            "Denmark",
            topic_name="editing",
            content="content before edit",
        )
        new_content_1 = "content after edit"
        result_1 = self.client_patch(
            f"/json/messages/{msg_id_1}",
            {
                "content": new_content_1,
            },
        )
        self.assert_json_success(result_1)

        message_edit_history_1 = self.client_get(f"/json/messages/{msg_id_1}/history")
        json_response_1 = orjson.loads(message_edit_history_1.content)
        message_history_1 = json_response_1["message_history"]

        # Check content of message after edit.
        # Entry 0 is the original version; entry 1 is the edited version.
        self.assertEqual(message_history_1[0]["rendered_content"], "<p>content before edit</p>")
        self.assertEqual(message_history_1[1]["rendered_content"], "<p>content after edit</p>")
        self.assertEqual(
            message_history_1[1]["content_html_diff"],
            (
                "<div><p>content "
                '<span class="highlight_text_inserted">after</span> '
                '<span class="highlight_text_deleted">before</span>'
                " edit</p></div>"
            ),
        )
        # Check content of message before edit.
        self.assertEqual(
            message_history_1[1]["prev_rendered_content"], "<p>content before edit</p>"
        )

        # Edits on new lines
        msg_id_2 = self.send_stream_message(
            self.example_user("hamlet"),
            "Denmark",
            topic_name="editing",
            content="content before edit, line 1\n\ncontent before edit, line 3",
        )
        new_content_2 = (
            "content before edit, line 1\n"
            "content after edit, line 2\n"
            "content before edit, line 3"
        )
        result_2 = self.client_patch(
            f"/json/messages/{msg_id_2}",
            {
                "content": new_content_2,
            },
        )
        self.assert_json_success(result_2)

        message_edit_history_2 = self.client_get(f"/json/messages/{msg_id_2}/history")
        json_response_2 = orjson.loads(message_edit_history_2.content)
        message_history_2 = json_response_2["message_history"]

        # Original: two paragraphs (blank line between line 1 and line 3).
        self.assertEqual(
            message_history_2[0]["rendered_content"],
            "<p>content before edit, line 1</p>\n<p>content before edit, line 3</p>",
        )
        # Edited: one paragraph with <br> line breaks.
        self.assertEqual(
            message_history_2[1]["rendered_content"],
            (
                "<p>content before edit, line 1<br>\n"
                "content after edit, line 2<br>\n"
                "content before edit, line 3</p>"
            ),
        )
        self.assertEqual(
            message_history_2[1]["content_html_diff"],
            (
                "<div><p>content before edit, line 1<br> "
                'content <span class="highlight_text_inserted">after edit, line 2<br> '
                "content</span> before edit, line 3</p></div>"
            ),
        )
        self.assertEqual(
            message_history_2[1]["prev_rendered_content"],
            "<p>content before edit, line 1</p>\n<p>content before edit, line 3</p>",
        )
2021-10-01 00:14:28 +02:00
|
|
|
    def test_empty_message_edit(self) -> None:
        """Edit history copes with messages whose rendered_content is empty
        (simulating a rendering bug by blanking the field directly in the DB)."""
        self.login("hamlet")
        msg_id = self.send_stream_message(
            self.example_user("hamlet"),
            "Denmark",
            topic_name="editing",
            content="We will edit this to render as empty.",
        )
        # Edit that manually to simulate a rendering bug
        message = Message.objects.get(id=msg_id)
        message.rendered_content = ""
        message.save(update_fields=["rendered_content"])

        self.assert_json_success(
            self.client_patch(
                "/json/messages/" + str(msg_id),
                {
                    "content": "We will edit this to also render as empty.",
                },
            )
        )
        # And again tweak to simulate a rendering bug
        message = Message.objects.get(id=msg_id)
        message.rendered_content = ""
        message.save(update_fields=["rendered_content"])

        # Both history entries are empty, and the diff of empty vs. empty
        # is just the wrapper div.
        history = self.client_get("/json/messages/" + str(msg_id) + "/history")
        message_history = orjson.loads(history.content)["message_history"]
        self.assertEqual(message_history[0]["rendered_content"], "")
        self.assertEqual(message_history[1]["rendered_content"], "")
        self.assertEqual(message_history[1]["content_html_diff"], "<div></div>")
2020-07-03 11:09:37 +02:00
|
|
|
    def test_edit_link(self) -> None:
        """Editing a Markdown link's URL is reflected in rendered history and
        the HTML diff (which spells out the inserted/deleted link targets)."""
        # Link editing
        self.login("hamlet")
        msg_id_1 = self.send_stream_message(
            self.example_user("hamlet"),
            "Denmark",
            topic_name="editing",
            content="Here is a link to [zulip](www.zulip.org).",
        )
        new_content_1 = "Here is a link to [zulip](www.zulipchat.com)."
        result_1 = self.client_patch(
            f"/json/messages/{msg_id_1}",
            {
                "content": new_content_1,
            },
        )
        self.assert_json_success(result_1)

        message_edit_history_1 = self.client_get(f"/json/messages/{msg_id_1}/history")
        json_response_1 = orjson.loads(message_edit_history_1.content)
        message_history_1 = json_response_1["message_history"]

        # Check content of message after edit.
        self.assertEqual(
            message_history_1[0]["rendered_content"],
            "<p>Here is a link to " '<a href="http://www.zulip.org">zulip</a>.</p>',
        )
        self.assertEqual(
            message_history_1[1]["rendered_content"],
            "<p>Here is a link to " '<a href="http://www.zulipchat.com">zulip</a>.</p>',
        )
        self.assertEqual(
            message_history_1[1]["content_html_diff"],
            (
                '<div><p>Here is a link to <a href="http://www.zulipchat.com"'
                ">zulip "
                '<span class="highlight_text_inserted"> Link: http://www.zulipchat.com .'
                '</span> <span class="highlight_text_deleted"> Link: http://www.zulip.org .'
                "</span> </a></p></div>"
            ),
        )
|
|
|
def test_edit_history_unedited(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
msg_id = self.send_stream_message(
|
2021-02-12 08:20:45 +01:00
|
|
|
self.example_user("hamlet"),
|
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
|
|
|
"Denmark",
|
2021-02-12 08:20:45 +01:00
|
|
|
topic_name="editing",
|
|
|
|
content="This message has not been edited.",
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_get(f"/json/messages/{msg_id}/history")
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2022-06-07 01:37:01 +02:00
|
|
|
message_history = self.assert_json_success(result)["message_history"]
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_length(message_history, 1)
|
|
|
|
|
2022-04-19 01:12:26 +02:00
|
|
|
def test_mentions_for_message_updates(self) -> None:
    """Verify that get_mentions_for_message_updates returns the user IDs
    mentioned in a stream message, so that edits can recompute mentions."""
    hamlet = self.example_user("hamlet")
    cordelia = self.example_user("cordelia")
    self.login_user(hamlet)
    # Both sender and mentioned user must be subscribed for the message
    # (and its UserMessage rows) to exist normally.
    self.subscribe(hamlet, "Denmark")
    self.subscribe(cordelia, "Denmark")

    msg_id = self.send_stream_message(
        hamlet, "Denmark", content="@**Cordelia, Lear's daughter**"
    )

    mention_user_ids = get_mentions_for_message_updates(msg_id)
    self.assertEqual(mention_user_ids, {cordelia.id})
|
|
|
|
|
|
|
|
def test_edit_cases(self) -> None:
    """This test verifies the accuracy of construction of Zulip's edit
    history data structures.

    It performs a sequence of edits (content-only, topic-only,
    stream-only, content+topic, topic+stream) and checks, after each
    edit, the newest edit_history entry; then it validates the full
    stored history and finally the /history API response shape.
    """
    self.login("hamlet")
    hamlet = self.example_user("hamlet")
    stream_1 = self.make_stream("stream 1")
    stream_2 = self.make_stream("stream 2")
    stream_3 = self.make_stream("stream 3")
    self.subscribe(hamlet, stream_1.name)
    self.subscribe(hamlet, stream_2.name)
    self.subscribe(hamlet, stream_3.name)
    msg_id = self.send_stream_message(
        self.example_user("hamlet"), "stream 1", topic_name="topic 1", content="content 1"
    )

    # Edit 1: content only.
    result = self.client_patch(
        f"/json/messages/{msg_id}",
        {
            "content": "content 2",
        },
    )
    self.assert_json_success(result)
    history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))
    self.assertEqual(history[0]["prev_content"], "content 1")
    self.assertEqual(history[0]["user_id"], hamlet.id)
    self.assertEqual(
        set(history[0].keys()),
        {
            "timestamp",
            "prev_content",
            "user_id",
            "prev_rendered_content",
            "prev_rendered_content_version",
        },
    )

    # Edit 2: topic only.
    result = self.client_patch(
        f"/json/messages/{msg_id}",
        {
            "topic": "topic 2",
        },
    )
    self.assert_json_success(result)
    history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))
    self.assertEqual(history[0]["prev_topic"], "topic 1")
    self.assertEqual(history[0]["topic"], "topic 2")
    self.assertEqual(history[0]["user_id"], hamlet.id)
    self.assertEqual(
        set(history[0].keys()),
        {"timestamp", "prev_topic", "topic", "user_id"},
    )

    # Edit 3: stream only (moving streams requires an administrator).
    self.login("iago")
    result = self.client_patch(
        f"/json/messages/{msg_id}",
        {
            "stream_id": stream_2.id,
        },
    )
    self.assert_json_success(result)
    history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))
    self.assertEqual(history[0]["prev_stream"], stream_1.id)
    self.assertEqual(history[0]["stream"], stream_2.id)
    self.assertEqual(history[0]["user_id"], self.example_user("iago").id)
    self.assertEqual(set(history[0].keys()), {"timestamp", "prev_stream", "stream", "user_id"})

    # Edit 4: content and topic together.
    self.login("hamlet")
    result = self.client_patch(
        f"/json/messages/{msg_id}",
        {
            "content": "content 3",
            "topic": "topic 3",
        },
    )
    self.assert_json_success(result)
    history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))
    self.assertEqual(history[0]["prev_content"], "content 2")
    self.assertEqual(history[0]["prev_topic"], "topic 2")
    self.assertEqual(history[0]["topic"], "topic 3")
    self.assertEqual(history[0]["user_id"], hamlet.id)
    self.assertEqual(
        set(history[0].keys()),
        {
            "timestamp",
            "prev_topic",
            "topic",
            "prev_content",
            "user_id",
            "prev_rendered_content",
            "prev_rendered_content_version",
        },
    )

    # Edit 5: content only, again.
    result = self.client_patch(
        f"/json/messages/{msg_id}",
        {
            "content": "content 4",
        },
    )
    self.assert_json_success(result)
    history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))
    self.assertEqual(history[0]["prev_content"], "content 3")
    self.assertEqual(history[0]["user_id"], hamlet.id)

    # Edit 6: topic and stream together (admin required for the stream move).
    self.login("iago")
    result = self.client_patch(
        f"/json/messages/{msg_id}",
        {
            "topic": "topic 4",
            "stream_id": stream_3.id,
        },
    )
    self.assert_json_success(result)
    history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))
    self.assertEqual(history[0]["prev_topic"], "topic 3")
    self.assertEqual(history[0]["topic"], "topic 4")
    self.assertEqual(history[0]["prev_stream"], stream_2.id)
    self.assertEqual(history[0]["stream"], stream_3.id)
    self.assertEqual(history[0]["user_id"], self.example_user("iago").id)
    self.assertEqual(
        set(history[0].keys()),
        {
            "timestamp",
            "prev_topic",
            "topic",
            "prev_stream",
            "stream",
            "user_id",
        },
    )

    # Now, we verify that all of the edits stored in the message.edit_history
    # have the correct data structure (newest first).
    history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))

    self.assertEqual(history[0]["prev_topic"], "topic 3")
    self.assertEqual(history[0]["topic"], "topic 4")
    self.assertEqual(history[0]["stream"], stream_3.id)
    self.assertEqual(history[0]["prev_stream"], stream_2.id)

    self.assertEqual(history[1]["prev_content"], "content 3")

    self.assertEqual(history[2]["prev_topic"], "topic 2")
    self.assertEqual(history[2]["topic"], "topic 3")
    self.assertEqual(history[2]["prev_content"], "content 2")

    self.assertEqual(history[3]["stream"], stream_2.id)
    self.assertEqual(history[3]["prev_stream"], stream_1.id)

    self.assertEqual(history[4]["prev_topic"], "topic 1")
    self.assertEqual(history[4]["topic"], "topic 2")

    self.assertEqual(history[5]["prev_content"], "content 1")

    # Now, we verify that the edit history data sent back has the
    # correct filled-out fields
    message_edit_history = self.client_get(f"/json/messages/{msg_id}/history")

    json_response = orjson.loads(message_edit_history.content)

    # We reverse the message history view output so that the IDs line up with the above.
    message_history = list(reversed(json_response["message_history"]))
    for i, entry in enumerate(message_history):
        expected_entries = {"content", "rendered_content", "topic", "timestamp", "user_id"}
        if i in {0, 2, 4}:
            # Entries with a topic edit carry prev_topic (and topic).
            expected_entries.add("prev_topic")
            expected_entries.add("topic")
        if i in {1, 2, 5}:
            # Entries with a content edit carry the diff fields.
            expected_entries.add("prev_content")
            expected_entries.add("prev_rendered_content")
            expected_entries.add("content_html_diff")
        if i in {0, 3}:
            # Entries with a stream move carry prev_stream/stream.
            expected_entries.add("prev_stream")
            expected_entries.add("stream")
        self.assertEqual(expected_entries, set(entry.keys()))
    # 6 edits plus the original message.
    self.assert_length(message_history, 7)
    self.assertEqual(message_history[0]["topic"], "topic 4")
    self.assertEqual(message_history[0]["prev_topic"], "topic 3")
    self.assertEqual(message_history[0]["stream"], stream_3.id)
    self.assertEqual(message_history[0]["prev_stream"], stream_2.id)
    self.assertEqual(message_history[0]["content"], "content 4")

    self.assertEqual(message_history[1]["topic"], "topic 3")
    self.assertEqual(message_history[1]["content"], "content 4")
    self.assertEqual(message_history[1]["prev_content"], "content 3")

    self.assertEqual(message_history[2]["topic"], "topic 3")
    self.assertEqual(message_history[2]["prev_topic"], "topic 2")
    self.assertEqual(message_history[2]["content"], "content 3")
    self.assertEqual(message_history[2]["prev_content"], "content 2")

    self.assertEqual(message_history[3]["topic"], "topic 2")
    self.assertEqual(message_history[3]["stream"], stream_2.id)
    self.assertEqual(message_history[3]["prev_stream"], stream_1.id)
    self.assertEqual(message_history[3]["content"], "content 2")

    self.assertEqual(message_history[4]["topic"], "topic 2")
    self.assertEqual(message_history[4]["prev_topic"], "topic 1")
    self.assertEqual(message_history[4]["content"], "content 2")

    self.assertEqual(message_history[5]["topic"], "topic 1")
    self.assertEqual(message_history[5]["content"], "content 2")
    self.assertEqual(message_history[5]["prev_content"], "content 1")

    self.assertEqual(message_history[6]["content"], "content 1")
    self.assertEqual(message_history[6]["topic"], "topic 1")
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
def test_edit_message_content_limit(self) -> None:
    """Verify message_content_edit_limit_seconds: content edits are
    rejected past the limit (topic edits still allowed), all edits are
    allowed with a 0 (unlimited) limit, and nothing is editable when
    allow_message_editing is off."""

    def set_message_editing_params(
        allow_message_editing: bool,
        message_content_edit_limit_seconds: int,
        edit_topic_policy: int,
    ) -> None:
        # Configure the realm's message-editing settings via the API.
        result = self.client_patch(
            "/json/realm",
            {
                "allow_message_editing": orjson.dumps(allow_message_editing).decode(),
                "message_content_edit_limit_seconds": message_content_edit_limit_seconds,
                "edit_topic_policy": edit_topic_policy,
            },
        )
        self.assert_json_success(result)

    def do_edit_message_assert_success(
        id_: int, unique_str: str, topic_only: bool = False
    ) -> None:
        # Edit the message and verify the edit landed.
        new_topic = "topic" + unique_str
        new_content = "content" + unique_str
        params_dict = {"topic": new_topic}
        if not topic_only:
            params_dict["content"] = new_content
        result = self.client_patch(f"/json/messages/{id_}", params_dict)
        self.assert_json_success(result)
        if topic_only:
            self.check_topic(id_, topic_name=new_topic)
        else:
            self.check_message(id_, topic_name=new_topic, content=new_content)

    def do_edit_message_assert_error(
        id_: int, unique_str: str, error: str, topic_only: bool = False
    ) -> None:
        # Attempt the edit, expect `error`, and verify nothing changed.
        message = Message.objects.get(id=id_)
        old_topic = message.topic_name()
        old_content = message.content
        new_topic = "topic" + unique_str
        new_content = "content" + unique_str
        params_dict = {"topic": new_topic}
        if not topic_only:
            params_dict["content"] = new_content
        result = self.client_patch(f"/json/messages/{id_}", params_dict)
        self.assert_json_error(result, error)

        # Re-fetch to confirm the message was left untouched.
        msg = Message.objects.get(id=id_)
        self.assertEqual(msg.topic_name(), old_topic)
        self.assertEqual(msg.content, old_content)

    self.login("iago")
    # send a message in the past
    id_ = self.send_stream_message(
        self.example_user("iago"), "Denmark", content="content", topic_name="topic"
    )
    message = Message.objects.get(id=id_)
    message.date_sent = message.date_sent - datetime.timedelta(seconds=180)
    message.save()

    # test the various possible message editing settings
    # high enough time limit, all edits allowed
    set_message_editing_params(True, 240, Realm.POLICY_ADMINS_ONLY)
    do_edit_message_assert_success(id_, "A")

    # out of time, only topic editing allowed
    set_message_editing_params(True, 120, Realm.POLICY_ADMINS_ONLY)
    do_edit_message_assert_success(id_, "B", True)
    do_edit_message_assert_error(id_, "C", "The time limit for editing this message has passed")

    # infinite time, all edits allowed
    set_message_editing_params(True, 0, Realm.POLICY_ADMINS_ONLY)
    do_edit_message_assert_success(id_, "D")

    # without allow_message_editing, nothing is allowed
    set_message_editing_params(False, 240, Realm.POLICY_ADMINS_ONLY)
    do_edit_message_assert_error(
        id_, "E", "Your organization has turned off message editing", True
    )
    set_message_editing_params(False, 120, Realm.POLICY_ADMINS_ONLY)
    do_edit_message_assert_error(
        id_, "F", "Your organization has turned off message editing", True
    )
    set_message_editing_params(False, 0, Realm.POLICY_ADMINS_ONLY)
    do_edit_message_assert_error(
        id_, "G", "Your organization has turned off message editing", True
    )
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-05-26 12:21:37 +02:00
|
|
|
def test_edit_topic_policy(self) -> None:
    """Verify edit_topic_policy: which roles (everyone, members, full
    members, moderators, admins) may edit a message's topic, the
    allow_message_editing=False override, the 72-hour limit for
    non-admin/moderator topic edits, and the '(no topic)' exemption."""

    def set_message_editing_params(
        allow_message_editing: bool,
        message_content_edit_limit_seconds: int,
        edit_topic_policy: int,
    ) -> None:
        # Only an admin may change realm settings, so log in as iago first.
        self.login("iago")
        result = self.client_patch(
            "/json/realm",
            {
                "allow_message_editing": orjson.dumps(allow_message_editing).decode(),
                "message_content_edit_limit_seconds": message_content_edit_limit_seconds,
                "edit_topic_policy": edit_topic_policy,
            },
        )
        self.assert_json_success(result)

    def do_edit_message_assert_success(id_: int, unique_str: str, acting_user: str) -> None:
        # Edit the topic as `acting_user` and verify the change landed.
        self.login(acting_user)
        new_topic = "topic" + unique_str
        params_dict = {"topic": new_topic}
        result = self.client_patch(f"/json/messages/{id_}", params_dict)
        self.assert_json_success(result)
        self.check_topic(id_, topic_name=new_topic)

    def do_edit_message_assert_error(
        id_: int, unique_str: str, error: str, acting_user: str
    ) -> None:
        # Attempt the topic edit as `acting_user`, expect `error`, and
        # verify the message was left untouched.
        self.login(acting_user)
        message = Message.objects.get(id=id_)
        old_topic = message.topic_name()
        old_content = message.content
        new_topic = "topic" + unique_str
        params_dict = {"topic": new_topic}
        result = self.client_patch(f"/json/messages/{id_}", params_dict)
        self.assert_json_error(result, error)
        msg = Message.objects.get(id=id_)
        self.assertEqual(msg.topic_name(), old_topic)
        self.assertEqual(msg.content, old_content)

    # send a message in the past
    id_ = self.send_stream_message(
        self.example_user("hamlet"), "Denmark", content="content", topic_name="topic"
    )
    message = Message.objects.get(id=id_)
    message.date_sent = message.date_sent - datetime.timedelta(seconds=180)
    message.save()

    # Guest user must be subscribed to the stream to access the message.
    polonius = self.example_user("polonius")
    self.subscribe(polonius, "Denmark")

    # any user can edit the topic of a message
    set_message_editing_params(True, 0, Realm.POLICY_EVERYONE)
    do_edit_message_assert_success(id_, "A", "polonius")

    # only members can edit topic of a message
    set_message_editing_params(True, 0, Realm.POLICY_MEMBERS_ONLY)
    do_edit_message_assert_error(
        id_, "B", "You don't have permission to edit this message", "polonius"
    )
    do_edit_message_assert_success(id_, "B", "cordelia")

    # only full members can edit topic of a message
    set_message_editing_params(True, 0, Realm.POLICY_FULL_MEMBERS_ONLY)

    cordelia = self.example_user("cordelia")
    do_set_realm_property(cordelia.realm, "waiting_period_threshold", 10, acting_user=None)

    # Below the waiting period, cordelia is not yet a full member.
    cordelia.date_joined = timezone_now() - datetime.timedelta(days=9)
    cordelia.save()
    do_edit_message_assert_error(
        id_, "C", "You don't have permission to edit this message", "cordelia"
    )

    # Past the waiting period, she is a full member and may edit.
    cordelia.date_joined = timezone_now() - datetime.timedelta(days=11)
    cordelia.save()
    do_edit_message_assert_success(id_, "C", "cordelia")

    # only moderators can edit topic of a message
    set_message_editing_params(True, 0, Realm.POLICY_MODERATORS_ONLY)
    do_edit_message_assert_error(
        id_, "D", "You don't have permission to edit this message", "cordelia"
    )
    do_edit_message_assert_success(id_, "D", "shiva")

    # only admins can edit the topics of messages
    set_message_editing_params(True, 0, Realm.POLICY_ADMINS_ONLY)
    do_edit_message_assert_error(
        id_, "E", "You don't have permission to edit this message", "shiva"
    )
    do_edit_message_assert_success(id_, "E", "iago")

    # users cannot edit topics if allow_message_editing is False
    set_message_editing_params(False, 0, Realm.POLICY_EVERYONE)
    do_edit_message_assert_error(
        id_, "D", "Your organization has turned off message editing", "cordelia"
    )

    # non-admin users cannot edit topics sent > 72 hrs ago
    message.date_sent = message.date_sent - datetime.timedelta(seconds=290000)
    message.save()
    set_message_editing_params(True, 0, Realm.POLICY_EVERYONE)
    do_edit_message_assert_success(id_, "E", "iago")
    do_edit_message_assert_success(id_, "F", "shiva")
    do_edit_message_assert_error(
        id_, "G", "The time limit for editing this message's topic has passed", "cordelia"
    )

    # anyone should be able to edit "no topic" indefinitely
    message.set_topic_name("(no topic)")
    message.save()
    do_edit_message_assert_success(id_, "D", "cordelia")
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2022-04-14 23:55:52 +02:00
|
|
|
@mock.patch("zerver.actions.message_edit.send_event")
|
2020-07-03 11:09:37 +02:00
|
|
|
def test_edit_topic_public_history_stream(self, mock_send_event: mock.MagicMock) -> None:
|
|
|
|
stream_name = "Macbeth"
|
|
|
|
hamlet = self.example_user("hamlet")
|
|
|
|
cordelia = self.example_user("cordelia")
|
|
|
|
self.make_stream(stream_name, history_public_to_subscribers=True)
|
|
|
|
self.subscribe(hamlet, stream_name)
|
|
|
|
self.login_user(hamlet)
|
|
|
|
message_id = self.send_stream_message(hamlet, stream_name, "Where am I?")
|
|
|
|
|
|
|
|
self.login_user(cordelia)
|
|
|
|
self.subscribe(cordelia, stream_name)
|
|
|
|
message = Message.objects.get(id=message_id)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def do_update_message_topic_success(
|
|
|
|
user_profile: UserProfile,
|
|
|
|
message: Message,
|
|
|
|
topic_name: str,
|
|
|
|
users_to_be_notified: List[Dict[str, Any]],
|
|
|
|
) -> None:
|
2020-07-03 11:09:37 +02:00
|
|
|
do_update_message(
|
|
|
|
user_profile=user_profile,
|
2021-05-10 06:10:32 +02:00
|
|
|
target_message=message,
|
2020-07-03 11:09:37 +02:00
|
|
|
new_stream=None,
|
|
|
|
topic_name=topic_name,
|
|
|
|
propagate_mode="change_later",
|
|
|
|
send_notification_to_old_thread=False,
|
|
|
|
send_notification_to_new_thread=False,
|
|
|
|
content=None,
|
2021-06-17 12:20:40 +02:00
|
|
|
rendering_result=None,
|
2020-07-03 11:09:37 +02:00
|
|
|
prior_mention_user_ids=set(),
|
|
|
|
mention_data=None,
|
|
|
|
)
|
|
|
|
|
|
|
|
mock_send_event.assert_called_with(mock.ANY, mock.ANY, users_to_be_notified)
|
|
|
|
|
|
|
|
# Returns the users that need to be notified when a message topic is changed
|
|
|
|
def notify(user_id: int) -> Dict[str, Any]:
|
|
|
|
um = UserMessage.objects.get(message=message_id)
|
|
|
|
if um.user_profile_id == user_id:
|
|
|
|
return {
|
|
|
|
"id": user_id,
|
|
|
|
"flags": um.flags_list(),
|
|
|
|
}
|
|
|
|
|
|
|
|
else:
|
|
|
|
return {
|
|
|
|
"id": user_id,
|
|
|
|
"flags": ["read"],
|
|
|
|
}
|
|
|
|
|
|
|
|
users_to_be_notified = list(map(notify, [hamlet.id, cordelia.id]))
|
|
|
|
# Edit topic of a message sent before Cordelia subscribed the stream
|
2021-02-12 08:19:30 +01:00
|
|
|
do_update_message_topic_success(
|
|
|
|
cordelia, message, "Othello eats apple", users_to_be_notified
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
# If Cordelia is long-term idle, she doesn't get a notification.
|
|
|
|
cordelia.long_term_idle = True
|
|
|
|
cordelia.save()
|
|
|
|
users_to_be_notified = list(map(notify, [hamlet.id]))
|
2021-02-12 08:19:30 +01:00
|
|
|
do_update_message_topic_success(
|
|
|
|
cordelia, message, "Another topic idle", users_to_be_notified
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
cordelia.long_term_idle = False
|
|
|
|
cordelia.save()
|
|
|
|
|
|
|
|
# Even if Hamlet unsubscribes the stream, he should be notified when the topic is changed
|
|
|
|
# because he has a UserMessage row.
|
|
|
|
self.unsubscribe(hamlet, stream_name)
|
|
|
|
users_to_be_notified = list(map(notify, [hamlet.id, cordelia.id]))
|
|
|
|
do_update_message_topic_success(cordelia, message, "Another topic", users_to_be_notified)
|
|
|
|
|
|
|
|
# Hamlet subscribes to the stream again and Cordelia unsubscribes, then Hamlet changes
|
|
|
|
# the message topic. Cordelia won't receive any updates when a message on that stream is
|
|
|
|
# changed because she is not a subscriber and doesn't have a UserMessage row.
|
|
|
|
self.subscribe(hamlet, stream_name)
|
|
|
|
self.unsubscribe(cordelia, stream_name)
|
|
|
|
self.login_user(hamlet)
|
|
|
|
users_to_be_notified = list(map(notify, [hamlet.id]))
|
|
|
|
do_update_message_topic_success(hamlet, message, "Change again", users_to_be_notified)
|
|
|
|
|
2022-04-14 23:55:52 +02:00
|
|
|
@mock.patch("zerver.actions.message_edit.send_event")
|
2022-03-18 01:19:16 +01:00
|
|
|
def test_edit_muted_topic(self, mock_send_event: mock.MagicMock) -> None:
|
|
|
|
stream_name = "Stream 123"
|
|
|
|
stream = self.make_stream(stream_name)
|
|
|
|
hamlet = self.example_user("hamlet")
|
|
|
|
cordelia = self.example_user("cordelia")
|
|
|
|
aaron = self.example_user("aaron")
|
|
|
|
self.subscribe(hamlet, stream_name)
|
|
|
|
self.login_user(hamlet)
|
|
|
|
message_id = self.send_stream_message(
|
|
|
|
hamlet, stream_name, topic_name="Topic1", content="Hello World"
|
|
|
|
)
|
|
|
|
|
|
|
|
self.subscribe(cordelia, stream_name)
|
|
|
|
self.login_user(cordelia)
|
|
|
|
self.subscribe(aaron, stream_name)
|
|
|
|
self.login_user(aaron)
|
|
|
|
|
2022-03-25 00:57:32 +01:00
|
|
|
already_muted_topic = "Already muted topic"
|
2022-03-18 01:19:16 +01:00
|
|
|
muted_topics = [
|
|
|
|
[stream_name, "Topic1"],
|
|
|
|
[stream_name, "Topic2"],
|
2022-03-25 00:57:32 +01:00
|
|
|
[stream_name, already_muted_topic],
|
2022-03-18 01:19:16 +01:00
|
|
|
]
|
|
|
|
set_topic_mutes(hamlet, muted_topics)
|
|
|
|
set_topic_mutes(cordelia, muted_topics)
|
|
|
|
|
|
|
|
# Returns the users that need to be notified when a message topic is changed
|
|
|
|
def notify(user_id: int) -> Dict[str, Any]:
|
|
|
|
um = UserMessage.objects.get(message=message_id)
|
|
|
|
if um.user_profile_id == user_id:
|
|
|
|
return {
|
|
|
|
"id": user_id,
|
|
|
|
"flags": um.flags_list(),
|
|
|
|
}
|
|
|
|
|
|
|
|
else:
|
|
|
|
return {
|
|
|
|
"id": user_id,
|
|
|
|
"flags": ["read"],
|
|
|
|
}
|
|
|
|
|
|
|
|
users_to_be_notified = list(map(notify, [hamlet.id, cordelia.id, aaron.id]))
|
|
|
|
change_all_topic_name = "Topic 1 edited"
|
|
|
|
|
|
|
|
with queries_captured() as queries:
|
|
|
|
check_update_message(
|
|
|
|
user_profile=hamlet,
|
|
|
|
message_id=message_id,
|
|
|
|
stream_id=None,
|
|
|
|
topic_name=change_all_topic_name,
|
|
|
|
propagate_mode="change_all",
|
|
|
|
send_notification_to_old_thread=False,
|
|
|
|
send_notification_to_new_thread=False,
|
|
|
|
content=None,
|
|
|
|
)
|
|
|
|
# This code path adds 9 (1 + 4/user with muted topics) to
|
|
|
|
# the number of database queries for moving a topic.
|
|
|
|
self.assert_length(queries, 18)
|
|
|
|
|
|
|
|
for muting_user in get_users_muting_topic(stream.id, change_all_topic_name):
|
|
|
|
for user in users_to_be_notified:
|
|
|
|
if muting_user.id == user["id"]:
|
|
|
|
user["muted_topics"] = get_topic_mutes(muting_user)
|
|
|
|
break
|
|
|
|
|
|
|
|
self.assertFalse(topic_is_muted(hamlet, stream.id, "Topic1"))
|
|
|
|
self.assertFalse(topic_is_muted(cordelia, stream.id, "Topic1"))
|
|
|
|
self.assertFalse(topic_is_muted(aaron, stream.id, "Topic1"))
|
|
|
|
self.assertTrue(topic_is_muted(hamlet, stream.id, "Topic2"))
|
|
|
|
self.assertTrue(topic_is_muted(cordelia, stream.id, "Topic2"))
|
|
|
|
self.assertFalse(topic_is_muted(aaron, stream.id, "Topic2"))
|
|
|
|
self.assertTrue(topic_is_muted(hamlet, stream.id, change_all_topic_name))
|
|
|
|
self.assertTrue(topic_is_muted(cordelia, stream.id, change_all_topic_name))
|
|
|
|
self.assertFalse(topic_is_muted(aaron, stream.id, change_all_topic_name))
|
|
|
|
|
|
|
|
change_later_topic_name = "Topic 1 edited again"
|
|
|
|
check_update_message(
|
|
|
|
user_profile=hamlet,
|
|
|
|
message_id=message_id,
|
|
|
|
stream_id=None,
|
|
|
|
topic_name=change_later_topic_name,
|
|
|
|
propagate_mode="change_later",
|
|
|
|
send_notification_to_old_thread=False,
|
|
|
|
send_notification_to_new_thread=False,
|
|
|
|
content=None,
|
|
|
|
)
|
|
|
|
self.assertFalse(topic_is_muted(hamlet, stream.id, change_all_topic_name))
|
|
|
|
self.assertTrue(topic_is_muted(hamlet, stream.id, change_later_topic_name))
|
|
|
|
|
2022-03-25 00:57:32 +01:00
|
|
|
# Make sure we safely handle the case of the new topic being already muted.
|
|
|
|
check_update_message(
|
|
|
|
user_profile=hamlet,
|
|
|
|
message_id=message_id,
|
|
|
|
stream_id=None,
|
|
|
|
topic_name=already_muted_topic,
|
|
|
|
propagate_mode="change_all",
|
|
|
|
send_notification_to_old_thread=False,
|
|
|
|
send_notification_to_new_thread=False,
|
|
|
|
content=None,
|
|
|
|
)
|
|
|
|
self.assertFalse(topic_is_muted(hamlet, stream.id, change_later_topic_name))
|
|
|
|
self.assertTrue(topic_is_muted(hamlet, stream.id, already_muted_topic))
|
|
|
|
|
2022-03-18 01:19:16 +01:00
|
|
|
change_one_topic_name = "Topic 1 edited change_one"
|
|
|
|
check_update_message(
|
|
|
|
user_profile=hamlet,
|
|
|
|
message_id=message_id,
|
|
|
|
stream_id=None,
|
|
|
|
topic_name=change_one_topic_name,
|
|
|
|
propagate_mode="change_one",
|
|
|
|
send_notification_to_old_thread=False,
|
|
|
|
send_notification_to_new_thread=False,
|
|
|
|
content=None,
|
|
|
|
)
|
2022-03-24 23:51:31 +01:00
|
|
|
self.assertTrue(topic_is_muted(hamlet, stream.id, change_one_topic_name))
|
|
|
|
self.assertFalse(topic_is_muted(hamlet, stream.id, change_later_topic_name))
|
2022-03-18 01:19:16 +01:00
|
|
|
|
2022-03-21 15:07:45 +01:00
|
|
|
# Move topic between two public streams.
|
|
|
|
desdemona = self.example_user("desdemona")
|
|
|
|
message_id = self.send_stream_message(
|
|
|
|
hamlet, stream_name, topic_name="New topic", content="Hello World"
|
|
|
|
)
|
|
|
|
new_public_stream = self.make_stream("New public stream")
|
|
|
|
self.subscribe(desdemona, new_public_stream.name)
|
|
|
|
self.login_user(desdemona)
|
|
|
|
muted_topics = [
|
|
|
|
[stream_name, "New topic"],
|
|
|
|
]
|
|
|
|
set_topic_mutes(desdemona, muted_topics)
|
|
|
|
set_topic_mutes(cordelia, muted_topics)
|
|
|
|
|
|
|
|
with queries_captured() as queries:
|
|
|
|
check_update_message(
|
|
|
|
user_profile=desdemona,
|
|
|
|
message_id=message_id,
|
|
|
|
stream_id=new_public_stream.id,
|
|
|
|
propagate_mode="change_all",
|
|
|
|
send_notification_to_old_thread=False,
|
|
|
|
send_notification_to_new_thread=False,
|
|
|
|
content=None,
|
|
|
|
)
|
|
|
|
self.assert_length(queries, 31)
|
|
|
|
|
|
|
|
self.assertFalse(topic_is_muted(desdemona, stream.id, "New topic"))
|
|
|
|
self.assertFalse(topic_is_muted(cordelia, stream.id, "New topic"))
|
|
|
|
self.assertFalse(topic_is_muted(aaron, stream.id, "New topic"))
|
|
|
|
self.assertTrue(topic_is_muted(desdemona, new_public_stream.id, "New topic"))
|
|
|
|
self.assertTrue(topic_is_muted(cordelia, new_public_stream.id, "New topic"))
|
|
|
|
self.assertFalse(topic_is_muted(aaron, new_public_stream.id, "New topic"))
|
|
|
|
|
|
|
|
# Move topic to a private stream.
|
|
|
|
message_id = self.send_stream_message(
|
|
|
|
hamlet, stream_name, topic_name="New topic", content="Hello World"
|
|
|
|
)
|
|
|
|
new_private_stream = self.make_stream("New private stream", invite_only=True)
|
|
|
|
self.subscribe(desdemona, new_private_stream.name)
|
|
|
|
self.login_user(desdemona)
|
|
|
|
muted_topics = [
|
|
|
|
[stream_name, "New topic"],
|
|
|
|
]
|
|
|
|
set_topic_mutes(desdemona, muted_topics)
|
|
|
|
set_topic_mutes(cordelia, muted_topics)
|
|
|
|
|
|
|
|
with queries_captured() as queries:
|
|
|
|
check_update_message(
|
|
|
|
user_profile=desdemona,
|
|
|
|
message_id=message_id,
|
|
|
|
stream_id=new_private_stream.id,
|
|
|
|
propagate_mode="change_all",
|
|
|
|
send_notification_to_old_thread=False,
|
|
|
|
send_notification_to_new_thread=False,
|
|
|
|
content=None,
|
|
|
|
)
|
2022-03-23 05:09:26 +01:00
|
|
|
self.assert_length(queries, 33)
|
2022-03-21 15:07:45 +01:00
|
|
|
|
|
|
|
# Cordelia is not subscribed to the private stream, so
|
|
|
|
# Cordelia should have had the topic unmuted, while Desdemona
|
|
|
|
# should have had her muted topic record moved.
|
|
|
|
self.assertFalse(topic_is_muted(desdemona, stream.id, "New topic"))
|
|
|
|
self.assertFalse(topic_is_muted(cordelia, stream.id, "New topic"))
|
|
|
|
self.assertFalse(topic_is_muted(aaron, stream.id, "New topic"))
|
|
|
|
self.assertTrue(topic_is_muted(desdemona, new_private_stream.id, "New topic"))
|
|
|
|
self.assertFalse(topic_is_muted(cordelia, new_private_stream.id, "New topic"))
|
|
|
|
self.assertFalse(topic_is_muted(aaron, new_private_stream.id, "New topic"))
|
|
|
|
|
|
|
|
# Move topic between two public streams with change in topic name.
|
|
|
|
desdemona = self.example_user("desdemona")
|
|
|
|
message_id = self.send_stream_message(
|
|
|
|
hamlet, stream_name, topic_name="New topic 2", content="Hello World"
|
|
|
|
)
|
|
|
|
self.login_user(desdemona)
|
|
|
|
muted_topics = [
|
|
|
|
[stream_name, "New topic 2"],
|
|
|
|
]
|
|
|
|
set_topic_mutes(desdemona, muted_topics)
|
|
|
|
set_topic_mutes(cordelia, muted_topics)
|
|
|
|
|
|
|
|
with queries_captured() as queries:
|
|
|
|
check_update_message(
|
|
|
|
user_profile=desdemona,
|
|
|
|
message_id=message_id,
|
|
|
|
stream_id=new_public_stream.id,
|
|
|
|
topic_name="changed topic name",
|
|
|
|
propagate_mode="change_all",
|
|
|
|
send_notification_to_old_thread=False,
|
|
|
|
send_notification_to_new_thread=False,
|
|
|
|
content=None,
|
|
|
|
)
|
|
|
|
self.assert_length(queries, 31)
|
|
|
|
|
|
|
|
self.assertFalse(topic_is_muted(desdemona, stream.id, "New topic 2"))
|
|
|
|
self.assertFalse(topic_is_muted(cordelia, stream.id, "New topic 2"))
|
|
|
|
self.assertFalse(topic_is_muted(aaron, stream.id, "New topic 2"))
|
|
|
|
self.assertTrue(topic_is_muted(desdemona, new_public_stream.id, "changed topic name"))
|
|
|
|
self.assertTrue(topic_is_muted(cordelia, new_public_stream.id, "changed topic name"))
|
|
|
|
self.assertFalse(topic_is_muted(aaron, new_public_stream.id, "changed topic name"))
|
|
|
|
|
2022-03-24 23:51:31 +01:00
|
|
|
# Moving only half the messages doesn't move MutedTopic records.
|
|
|
|
second_message_id = self.send_stream_message(
|
|
|
|
hamlet, stream_name, topic_name="changed topic name", content="Second message"
|
|
|
|
)
|
|
|
|
with queries_captured() as queries:
|
|
|
|
check_update_message(
|
|
|
|
user_profile=desdemona,
|
|
|
|
message_id=second_message_id,
|
|
|
|
stream_id=new_public_stream.id,
|
|
|
|
topic_name="final topic name",
|
|
|
|
propagate_mode="change_later",
|
|
|
|
send_notification_to_old_thread=False,
|
|
|
|
send_notification_to_new_thread=False,
|
|
|
|
content=None,
|
|
|
|
)
|
|
|
|
self.assert_length(queries, 25)
|
|
|
|
|
|
|
|
self.assertTrue(topic_is_muted(desdemona, new_public_stream.id, "changed topic name"))
|
|
|
|
self.assertTrue(topic_is_muted(cordelia, new_public_stream.id, "changed topic name"))
|
|
|
|
self.assertFalse(topic_is_muted(aaron, new_public_stream.id, "changed topic name"))
|
|
|
|
self.assertFalse(topic_is_muted(desdemona, new_public_stream.id, "final topic name"))
|
|
|
|
self.assertFalse(topic_is_muted(cordelia, new_public_stream.id, "final topic name"))
|
|
|
|
self.assertFalse(topic_is_muted(aaron, new_public_stream.id, "final topic name"))
|
|
|
|
|
2022-04-14 23:55:52 +02:00
|
|
|
@mock.patch("zerver.actions.message_edit.send_event")
|
2020-07-03 11:09:37 +02:00
|
|
|
def test_wildcard_mention(self, mock_send_event: mock.MagicMock) -> None:
|
|
|
|
stream_name = "Macbeth"
|
|
|
|
hamlet = self.example_user("hamlet")
|
|
|
|
cordelia = self.example_user("cordelia")
|
|
|
|
self.make_stream(stream_name, history_public_to_subscribers=True)
|
|
|
|
self.subscribe(hamlet, stream_name)
|
|
|
|
self.subscribe(cordelia, stream_name)
|
|
|
|
self.login_user(hamlet)
|
|
|
|
message_id = self.send_stream_message(hamlet, stream_name, "Hello everyone")
|
|
|
|
|
|
|
|
def notify(user_id: int) -> Dict[str, Any]:
|
|
|
|
return {
|
|
|
|
"id": user_id,
|
|
|
|
"flags": ["wildcard_mentioned"],
|
|
|
|
}
|
|
|
|
|
|
|
|
users_to_be_notified = sorted(map(notify, [cordelia.id, hamlet.id]), key=itemgetter("id"))
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{message_id}",
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"content": "Hello @**everyone**",
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
# Extract the send_event call where event type is 'update_message'.
|
|
|
|
# Here we assert wildcard_mention_user_ids has been set properly.
|
|
|
|
called = False
|
|
|
|
for call_args in mock_send_event.call_args_list:
|
|
|
|
(arg_realm, arg_event, arg_notified_users) = call_args[0]
|
2021-02-12 08:20:45 +01:00
|
|
|
if arg_event["type"] == "update_message":
|
|
|
|
self.assertEqual(arg_event["type"], "update_message")
|
|
|
|
self.assertEqual(arg_event["wildcard_mention_user_ids"], [cordelia.id, hamlet.id])
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
|
|
|
sorted(arg_notified_users, key=itemgetter("id")), users_to_be_notified
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
called = True
|
|
|
|
self.assertTrue(called)
|
|
|
|
|
2021-12-06 18:40:30 +01:00
|
|
|
def test_wildcard_mention_restrictions_when_editing(self) -> None:
    """With WILDCARD_MENTION_POLICY_MODERATORS, a non-moderator may only add
    @**everyone** via an edit when the stream is small enough; moderators
    are never restricted."""
    cordelia = self.example_user("cordelia")
    shiva = self.example_user("shiva")
    self.login("cordelia")
    stream_name = "Macbeth"
    self.make_stream(stream_name, history_public_to_subscribers=True)
    self.subscribe(cordelia, stream_name)
    self.subscribe(shiva, stream_name)
    message_id = self.send_stream_message(cordelia, stream_name, "Hello everyone")

    realm = cordelia.realm
    do_set_realm_property(
        realm,
        "wildcard_mention_policy",
        Realm.WILDCARD_MENTION_POLICY_MODERATORS,
        acting_user=None,
    )

    # Large stream: Cordelia (not a moderator) is blocked.
    with mock.patch("zerver.lib.message.num_subscribers_for_stream_id", return_value=17):
        result = self.client_patch(
            "/json/messages/" + str(message_id),
            {
                "content": "Hello @**everyone**",
            },
        )
    self.assert_json_error(
        result, "You do not have permission to use wildcard mentions in this stream."
    )

    # Small stream: the policy does not apply, so the edit succeeds.
    with mock.patch("zerver.lib.message.num_subscribers_for_stream_id", return_value=14):
        result = self.client_patch(
            "/json/messages/" + str(message_id),
            {
                "content": "Hello @**everyone**",
            },
        )
    self.assert_json_success(result)

    # Shiva is a moderator, so even the large stream is allowed.
    self.login("shiva")
    message_id = self.send_stream_message(shiva, stream_name, "Hi everyone")
    with mock.patch("zerver.lib.message.num_subscribers_for_stream_id", return_value=17):
        result = self.client_patch(
            "/json/messages/" + str(message_id),
            {
                "content": "Hello @**everyone**",
            },
        )
    self.assert_json_success(result)
|
2020-12-28 11:30:07 +01:00
|
|
|
def test_topic_edit_history_saved_in_all_message(self) -> None:
    """A change_later topic edit must record an edit_history entry on every
    propagated message, and leave unrelated messages untouched."""
    self.login("hamlet")
    id1 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic1")
    id2 = self.send_stream_message(self.example_user("iago"), "Denmark", topic_name="topic1")
    id3 = self.send_stream_message(self.example_user("iago"), "Verona", topic_name="topic1")
    id4 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic2")
    id5 = self.send_stream_message(self.example_user("iago"), "Denmark", topic_name="topic1")

    def verify_edit_history(new_topic: str, len_edit_history: int) -> None:
        # id1, id2 and id5 share the edited topic; id3 is on another
        # stream and id4 on another topic, so both stay untouched.
        for msg_id in [id1, id2, id5]:
            msg = Message.objects.get(id=msg_id)

            self.assertEqual(
                new_topic,
                msg.topic_name(),
            )
            # Since edit history is being generated by do_update_message,
            # it's contents can vary over time; So, to keep this test
            # future proof, we only verify it's length.
            self.assert_length(
                orjson.loads(assert_is_not_none(msg.edit_history)), len_edit_history
            )

        for msg_id in [id3, id4]:
            msg = Message.objects.get(id=msg_id)
            self.assertEqual(msg.edit_history, None)

    new_topic = "edited"
    result = self.client_patch(
        f"/json/messages/{id1}",
        {
            "topic": new_topic,
            "propagate_mode": "change_later",
        },
    )

    self.assert_json_success(result)
    verify_edit_history(new_topic, 1)

    new_topic = "edited2"
    result = self.client_patch(
        f"/json/messages/{id1}",
        {
            "topic": new_topic,
            "propagate_mode": "change_later",
        },
    )

    self.assert_json_success(result)
    verify_edit_history(new_topic, 2)
|
2021-04-22 07:23:04 +02:00
|
|
|
def test_topic_and_content_edit(self) -> None:
    """A combined topic+content edit should record the content change only
    on the edited message, while propagating the topic change (and an
    edit_history entry) to every message in the topic."""
    self.login("hamlet")
    id1 = self.send_stream_message(self.example_user("hamlet"), "Denmark", "message 1", "topic")
    id2 = self.send_stream_message(self.example_user("iago"), "Denmark", "message 2", "topic")
    id3 = self.send_stream_message(self.example_user("hamlet"), "Denmark", "message 3", "topic")

    new_topic = "edited"
    result = self.client_patch(
        "/json/messages/" + str(id1),
        {
            "topic": new_topic,
            "propagate_mode": "change_later",
            "content": "edited message",
        },
    )

    self.assert_json_success(result)

    # Content change of only id1 should come in edit history
    # and topic change should be present in all the messages.
    msg1 = Message.objects.get(id=id1)
    msg2 = Message.objects.get(id=id2)
    msg3 = Message.objects.get(id=id3)

    msg1_edit_history = orjson.loads(assert_is_not_none(msg1.edit_history))
    self.assertTrue("prev_content" in msg1_edit_history[0].keys())

    for msg in [msg2, msg3]:
        self.assertFalse(
            "prev_content" in orjson.loads(assert_is_not_none(msg.edit_history))[0].keys()
        )

    for msg in [msg1, msg2, msg3]:
        self.assertEqual(
            new_topic,
            msg.topic_name(),
        )
        self.assert_length(orjson.loads(assert_is_not_none(msg.edit_history)), 1)
|
2020-07-03 11:09:37 +02:00
|
|
|
def test_propagate_topic_forward(self) -> None:
    """propagate_mode=change_later renames the topic on the edited message
    and on later messages in the same stream+topic, but not on messages in
    other streams or other topics."""
    self.login("hamlet")
    id1 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic1")
    id2 = self.send_stream_message(self.example_user("iago"), "Denmark", topic_name="topic1")
    id3 = self.send_stream_message(self.example_user("iago"), "Verona", topic_name="topic1")
    id4 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic2")
    id5 = self.send_stream_message(self.example_user("iago"), "Denmark", topic_name="topic1")

    result = self.client_patch(
        f"/json/messages/{id1}",
        {
            "topic": "edited",
            "propagate_mode": "change_later",
        },
    )
    self.assert_json_success(result)

    # id3 (other stream) and id4 (other topic) must keep their topics.
    self.check_topic(id1, topic_name="edited")
    self.check_topic(id2, topic_name="edited")
    self.check_topic(id3, topic_name="topic1")
    self.check_topic(id4, topic_name="topic2")
    self.check_topic(id5, topic_name="edited")
|
|
|
|
def test_propagate_all_topics(self) -> None:
    """Editing a topic with propagate_mode="change_all" renames every
    message in that stream/topic pair, leaving messages in other
    streams and other topics untouched."""
    self.login("hamlet")
    id1 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic1")
    id2 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic1")
    id3 = self.send_stream_message(self.example_user("iago"), "Verona", topic_name="topic1")
    id4 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic2")
    id5 = self.send_stream_message(self.example_user("iago"), "Denmark", topic_name="topic1")
    id6 = self.send_stream_message(self.example_user("iago"), "Denmark", topic_name="topic3")

    result = self.client_patch(
        f"/json/messages/{id2}",
        {"topic": "edited", "propagate_mode": "change_all"},
    )
    self.assert_json_success(result)

    # Every "topic1" message in Denmark was renamed; the Verona message
    # (other stream) and the topic2/topic3 messages (other topics) were not.
    expected_topics = [
        (id1, "edited"),
        (id2, "edited"),
        (id3, "topic1"),
        (id4, "topic2"),
        (id5, "edited"),
        (id6, "topic3"),
    ]
    for message_id, expected_topic in expected_topics:
        self.check_topic(message_id, topic_name=expected_topic)
|
def test_propagate_all_topics_with_different_uppercase_letters(self) -> None:
    """Topic propagation with "change_all" matches topics
    case-insensitively, so messages whose topic names differ only in
    capitalization are all renamed together."""
    self.login("hamlet")
    id1 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic1")
    id2 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="Topic1")
    id3 = self.send_stream_message(self.example_user("iago"), "Verona", topic_name="topiC1")
    id4 = self.send_stream_message(self.example_user("iago"), "Denmark", topic_name="toPic1")

    result = self.client_patch(
        f"/json/messages/{id2}",
        {"topic": "edited", "propagate_mode": "change_all"},
    )
    self.assert_json_success(result)

    # All the Denmark messages match case-insensitively and get renamed;
    # the Verona message is in another stream and keeps its topic.
    for message_id, expected_topic in [
        (id1, "edited"),
        (id2, "edited"),
        (id3, "topiC1"),
        (id4, "edited"),
    ]:
        self.check_topic(message_id, topic_name=expected_topic)
def test_move_message_to_stream(self) -> None:
    """Moving a whole topic to another stream leaves a breadcrumb
    notification in the old topic and appends one to the new topic.

    The breadcrumbs are asserted in English even though both the acting
    user's language and the request's Accept-Language are German,
    verifying they are written in the realm's default language.
    """
    (user_profile, old_stream, new_stream, msg_id, msg_id_lt) = self.prepare_move_topics(
        "iago",
        "test move stream",
        "new stream",
        "test",
        # Set the user's translation language to German to test that
        # it is overridden by the realm's default language.
        "de",
    )

    result = self.client_patch(
        f"/json/messages/{msg_id}",
        {
            "stream_id": new_stream.id,
            "propagate_mode": "change_all",
        },
        # The request-level language should not affect the breadcrumbs either.
        HTTP_ACCEPT_LANGUAGE="de",
    )

    self.assert_json_success(result)

    # Only the breadcrumb message remains in the old topic.
    messages = get_topic_messages(user_profile, old_stream, "test")
    self.assert_length(messages, 1)
    self.assertEqual(
        messages[0].content,
        f"This topic was moved to #**new stream>test** by @_**Iago|{user_profile.id}**.",
    )

    # The destination topic holds the moved messages plus its own
    # breadcrumb as the final message.
    messages = get_topic_messages(user_profile, new_stream, "test")
    self.assert_length(messages, 4)
    self.assertEqual(
        messages[3].content,
        f"This topic was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**.",
    )
def test_move_message_realm_admin_cant_move_to_another_realm(self) -> None:
    """Even a realm administrator cannot move a message into a stream
    belonging to a different realm; the cross-realm stream is reported
    as if it did not exist."""
    user_profile = self.example_user("iago")
    self.assertEqual(user_profile.role, UserProfile.ROLE_REALM_ADMINISTRATOR)
    self.login("iago")

    # The target stream lives in a *different* realm.
    lear_realm = get_realm("lear")
    new_stream = self.make_stream("new", lear_realm)

    msg_id = self.send_stream_message(user_profile, "Verona", topic_name="test123")

    result = self.client_patch(
        f"/json/messages/{msg_id}",
        {"stream_id": new_stream.id, "propagate_mode": "change_all"},
    )

    # The stream from the other realm is invisible here.
    self.assert_json_error(result, "Invalid stream ID")
def test_move_message_realm_admin_cant_move_to_private_stream_without_subscription(
    self,
) -> None:
    """A realm administrator who is not subscribed to a private stream
    cannot move messages into it; the error does not even acknowledge
    that the stream exists."""
    user_profile = self.example_user("iago")
    self.assertEqual(user_profile.role, UserProfile.ROLE_REALM_ADMINISTRATOR)
    self.login("iago")

    # Invite-only stream that the admin never subscribes to.
    new_stream = self.make_stream("new", invite_only=True)
    msg_id = self.send_stream_message(user_profile, "Verona", topic_name="test123")

    result = self.client_patch(
        f"/json/messages/{msg_id}",
        {"stream_id": new_stream.id, "propagate_mode": "change_all"},
    )

    # Unsubscribed users get the same error as for a nonexistent stream.
    self.assert_json_error(result, "Invalid stream ID")
def test_move_message_realm_admin_cant_move_from_private_stream_without_subscription(
    self,
) -> None:
    """An administrator who has dropped their subscription to a private
    stream loses the access needed to move messages out of it, even
    messages they sent themselves."""
    user_profile = self.example_user("iago")
    self.assertEqual(user_profile.role, UserProfile.ROLE_REALM_ADMINISTRATOR)
    self.login("iago")

    # Send a message while subscribed, then unsubscribe so the admin no
    # longer has access to the private stream.
    self.make_stream("privatestream", invite_only=True)
    self.subscribe(user_profile, "privatestream")
    msg_id = self.send_stream_message(user_profile, "privatestream", topic_name="test123")
    self.unsubscribe(user_profile, "privatestream")

    verona = get_stream("Verona", user_profile.realm)

    result = self.client_patch(
        f"/json/messages/{msg_id}",
        {"stream_id": verona.id, "propagate_mode": "change_all"},
    )

    self.assert_json_error(
        result,
        "You don't have permission to move this message due to missing access to its stream",
    )
def test_move_message_from_private_stream_message_access_checks(
    self,
) -> None:
    """Moving a topic out of a private stream (with protected history)
    only moves the messages the acting user can access.

    Messages sent while the user was unsubscribed — including ones from
    before they first subscribed — stay behind in the private stream.
    """
    hamlet = self.example_user("hamlet")
    user_profile = self.example_user("iago")
    self.assertEqual(user_profile.role, UserProfile.ROLE_REALM_ADMINISTRATOR)
    self.login("iago")

    # history_public_to_subscribers=False: subscribers only see messages
    # sent while they were subscribed.
    private_stream = self.make_stream(
        "privatestream", invite_only=True, history_public_to_subscribers=False
    )
    self.subscribe(hamlet, "privatestream")
    # Sent before iago subscribes — inaccessible to iago.
    original_msg_id = self.send_stream_message(hamlet, "privatestream", topic_name="test123")
    self.subscribe(user_profile, "privatestream")
    new_msg_id = self.send_stream_message(user_profile, "privatestream", topic_name="test123")

    # Now we unsub and hamlet sends a new message (we won't have access to it even after re-subbing!)
    self.unsubscribe(user_profile, "privatestream")
    new_inaccessible_msg_id = self.send_stream_message(
        hamlet, "privatestream", topic_name="test123"
    )

    # Re-subscribe and send another message:
    self.subscribe(user_profile, "privatestream")
    newest_msg_id = self.send_stream_message(
        user_profile, "privatestream", topic_name="test123"
    )

    verona = get_stream("Verona", user_profile.realm)

    result = self.client_patch(
        "/json/messages/" + str(new_msg_id),
        {
            "stream_id": verona.id,
            "propagate_mode": "change_all",
        },
    )

    self.assert_json_success(result)
    # The two messages iago could access were moved to Verona.
    self.assertEqual(Message.objects.get(id=new_msg_id).recipient_id, verona.recipient_id)
    self.assertEqual(Message.objects.get(id=newest_msg_id).recipient_id, verona.recipient_id)
    # The original message and the new, inaccessible message weren't moved,
    # because user_profile doesn't have access to them.
    self.assertEqual(
        Message.objects.get(id=original_msg_id).recipient_id, private_stream.recipient_id
    )
    self.assertEqual(
        Message.objects.get(id=new_inaccessible_msg_id).recipient_id,
        private_stream.recipient_id,
    )
def test_move_message_to_stream_change_later(self) -> None:
    """propagate_mode="change_later" moves the targeted message and the
    messages after it to the new stream, leaving earlier messages in
    place; both topics get a "2 messages were moved" breadcrumb."""
    (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
        "iago", "test move stream", "new stream", "test"
    )

    result = self.client_patch(
        f"/json/messages/{msg_id_later}",
        {
            "stream_id": new_stream.id,
            "propagate_mode": "change_later",
        },
    )
    self.assert_json_success(result)

    # The first message stays behind, joined by the breadcrumb.
    messages = get_topic_messages(user_profile, old_stream, "test")
    self.assert_length(messages, 2)
    self.assertEqual(messages[0].id, msg_id)
    self.assertEqual(
        messages[1].content,
        f"2 messages were moved from this topic to #**new stream>test** by @_**Iago|{user_profile.id}**.",
    )

    # The destination gets the two moved messages plus its breadcrumb.
    messages = get_topic_messages(user_profile, new_stream, "test")
    self.assert_length(messages, 3)
    self.assertEqual(messages[0].id, msg_id_later)
    self.assertEqual(
        messages[2].content,
        f"2 messages were moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**.",
    )
def test_move_message_to_stream_change_later_all_moved(self) -> None:
    """When "change_later" is anchored on the very first message of the
    topic, the whole topic ends up moved, and the breadcrumbs use the
    "This topic was moved" wording rather than a message count."""
    (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
        "iago", "test move stream", "new stream", "test"
    )

    result = self.client_patch(
        f"/json/messages/{msg_id}",
        {
            "stream_id": new_stream.id,
            "propagate_mode": "change_later",
        },
    )
    self.assert_json_success(result)

    # Only the breadcrumb remains in the old topic.
    messages = get_topic_messages(user_profile, old_stream, "test")
    self.assert_length(messages, 1)
    self.assertEqual(
        messages[0].content,
        f"This topic was moved to #**new stream>test** by @_**Iago|{user_profile.id}**.",
    )

    # All original messages plus the breadcrumb are in the new topic.
    messages = get_topic_messages(user_profile, new_stream, "test")
    self.assert_length(messages, 4)
    self.assertEqual(messages[0].id, msg_id)
    self.assertEqual(
        messages[3].content,
        f"This topic was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**.",
    )
def test_move_message_to_stream_change_one(self) -> None:
    """propagate_mode="change_one" moves only the targeted message;
    both topics get the singular "A message was moved" breadcrumb."""
    (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
        "iago", "test move stream", "new stream", "test"
    )

    result = self.client_patch(
        f"/json/messages/{msg_id_later}",
        {"stream_id": new_stream.id, "propagate_mode": "change_one"},
    )
    self.assert_json_success(result)

    # The two untouched messages stay, followed by the breadcrumb.
    old_topic_messages = get_topic_messages(user_profile, old_stream, "test")
    self.assert_length(old_topic_messages, 3)
    self.assertEqual(old_topic_messages[0].id, msg_id)
    self.assertEqual(
        old_topic_messages[2].content,
        f"A message was moved from this topic to #**new stream>test** by @_**Iago|{user_profile.id}**.",
    )

    # The destination holds the one moved message plus its breadcrumb.
    new_topic_messages = get_topic_messages(user_profile, new_stream, "test")
    self.assert_length(new_topic_messages, 2)
    self.assertEqual(new_topic_messages[0].id, msg_id_later)
    self.assertEqual(
        new_topic_messages[1].content,
        f"A message was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**.",
    )
def test_move_message_to_stream_change_all(self) -> None:
    """propagate_mode="change_all" moves the entire topic no matter
    which message within it the request targets."""
    (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
        "iago", "test move stream", "new stream", "test"
    )

    result = self.client_patch(
        f"/json/messages/{msg_id_later}",
        {"stream_id": new_stream.id, "propagate_mode": "change_all"},
    )
    self.assert_json_success(result)

    # Only the breadcrumb notification is left in the old topic.
    old_topic_messages = get_topic_messages(user_profile, old_stream, "test")
    self.assert_length(old_topic_messages, 1)
    self.assertEqual(
        old_topic_messages[0].content,
        f"This topic was moved to #**new stream>test** by @_**Iago|{user_profile.id}**.",
    )

    # Every original message, plus the breadcrumb, is in the new topic.
    new_topic_messages = get_topic_messages(user_profile, new_stream, "test")
    self.assert_length(new_topic_messages, 4)
    self.assertEqual(new_topic_messages[0].id, msg_id)
    self.assertEqual(
        new_topic_messages[3].content,
        f"This topic was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**.",
    )
def test_move_message_between_streams_policy_setting(self) -> None:
    """Exercise each value of the realm's
    move_messages_between_streams_policy: for every policy, the highest
    role below the threshold is rejected and the threshold role
    succeeds."""
    (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
        "othello", "old_stream_1", "new_stream_1", "test"
    )

    def check_move_message_according_to_policy(role: int, expect_fail: bool = False) -> None:
        # Closure reads user_profile/old_stream/new_stream/msg_id from the
        # enclosing scope at call time, so re-running prepare_move_topics
        # below re-targets it at the fresh streams.
        do_change_user_role(user_profile, role, acting_user=None)

        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "stream_id": new_stream.id,
                "propagate_mode": "change_all",
            },
        )

        if expect_fail:
            self.assert_json_error(result, "You don't have permission to move this message")
            # Nothing moved: all 3 messages still in the old topic.
            messages = get_topic_messages(user_profile, old_stream, "test")
            self.assert_length(messages, 3)
            messages = get_topic_messages(user_profile, new_stream, "test")
            self.assert_length(messages, 0)
        else:
            self.assert_json_success(result)
            # Moved: only the breadcrumb remains; destination has 3 + breadcrumb.
            messages = get_topic_messages(user_profile, old_stream, "test")
            self.assert_length(messages, 1)
            messages = get_topic_messages(user_profile, new_stream, "test")
            self.assert_length(messages, 4)

    # Check sending messages when policy is Realm.POLICY_ADMINS_ONLY.
    do_set_realm_property(
        user_profile.realm,
        "move_messages_between_streams_policy",
        Realm.POLICY_ADMINS_ONLY,
        acting_user=None,
    )
    check_move_message_according_to_policy(UserProfile.ROLE_MODERATOR, expect_fail=True)
    check_move_message_according_to_policy(UserProfile.ROLE_REALM_ADMINISTRATOR)

    (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
        "othello", "old_stream_2", "new_stream_2", "test"
    )
    # Check sending messages when policy is Realm.POLICY_MODERATORS_ONLY.
    do_set_realm_property(
        user_profile.realm,
        "move_messages_between_streams_policy",
        Realm.POLICY_MODERATORS_ONLY,
        acting_user=None,
    )
    check_move_message_according_to_policy(UserProfile.ROLE_MEMBER, expect_fail=True)
    check_move_message_according_to_policy(UserProfile.ROLE_MODERATOR)

    (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
        "othello", "old_stream_3", "new_stream_3", "test"
    )
    # Check sending messages when policy is Realm.POLICY_FULL_MEMBERS_ONLY.
    do_set_realm_property(
        user_profile.realm,
        "move_messages_between_streams_policy",
        Realm.POLICY_FULL_MEMBERS_ONLY,
        acting_user=None,
    )
    # With a huge waiting period, a member is not yet a "full" member.
    do_set_realm_property(
        user_profile.realm, "waiting_period_threshold", 100000, acting_user=None
    )
    check_move_message_according_to_policy(UserProfile.ROLE_MEMBER, expect_fail=True)

    do_set_realm_property(user_profile.realm, "waiting_period_threshold", 0, acting_user=None)
    check_move_message_according_to_policy(UserProfile.ROLE_MEMBER)

    (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
        "othello", "old_stream_4", "new_stream_4", "test"
    )
    # Check sending messages when policy is Realm.POLICY_MEMBERS_ONLY.
    do_set_realm_property(
        user_profile.realm,
        "move_messages_between_streams_policy",
        Realm.POLICY_MEMBERS_ONLY,
        acting_user=None,
    )
    check_move_message_according_to_policy(UserProfile.ROLE_GUEST, expect_fail=True)
    check_move_message_according_to_policy(UserProfile.ROLE_MEMBER)
def test_move_message_to_stream_based_on_stream_post_policy(self) -> None:
    """Moving messages into a stream also requires permission to post
    there: exercise each stream_post_policy value on the destination
    stream, with move_messages_between_streams_policy held at
    members-only."""
    (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
        "othello", "old_stream_1", "new_stream_1", "test"
    )
    do_set_realm_property(
        user_profile.realm,
        "move_messages_between_streams_policy",
        Realm.POLICY_MEMBERS_ONLY,
        acting_user=None,
    )

    def check_move_message_to_stream(role: int, error_msg: Optional[str] = None) -> None:
        # Closure reads user_profile/old_stream/new_stream/msg_id from the
        # enclosing scope at call time, so re-running prepare_move_topics
        # below re-targets it at the fresh streams.
        do_change_user_role(user_profile, role, acting_user=None)

        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "stream_id": new_stream.id,
                "propagate_mode": "change_all",
            },
        )

        if error_msg is not None:
            self.assert_json_error(result, error_msg)
            # Nothing moved on failure.
            messages = get_topic_messages(user_profile, old_stream, "test")
            self.assert_length(messages, 3)
            messages = get_topic_messages(user_profile, new_stream, "test")
            self.assert_length(messages, 0)
        else:
            self.assert_json_success(result)
            # Old topic keeps only the breadcrumb; destination has 3 + breadcrumb.
            messages = get_topic_messages(user_profile, old_stream, "test")
            self.assert_length(messages, 1)
            messages = get_topic_messages(user_profile, new_stream, "test")
            self.assert_length(messages, 4)

    # Check when stream_post_policy is STREAM_POST_POLICY_ADMINS.
    do_change_stream_post_policy(
        new_stream, Stream.STREAM_POST_POLICY_ADMINS, acting_user=user_profile
    )
    error_msg = "Only organization administrators can send to this stream."
    check_move_message_to_stream(UserProfile.ROLE_MODERATOR, error_msg)
    check_move_message_to_stream(UserProfile.ROLE_REALM_ADMINISTRATOR)

    (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
        "othello", "old_stream_2", "new_stream_2", "test"
    )

    # Check when stream_post_policy is STREAM_POST_POLICY_MODERATORS.
    do_change_stream_post_policy(
        new_stream, Stream.STREAM_POST_POLICY_MODERATORS, acting_user=user_profile
    )
    error_msg = "Only organization administrators and moderators can send to this stream."
    check_move_message_to_stream(UserProfile.ROLE_MEMBER, error_msg)
    check_move_message_to_stream(UserProfile.ROLE_MODERATOR)

    (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
        "othello", "old_stream_3", "new_stream_3", "test"
    )

    # Check when stream_post_policy is STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS.
    do_change_stream_post_policy(
        new_stream, Stream.STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS, acting_user=user_profile
    )
    error_msg = "New members cannot send to this stream."

    # With a huge waiting period, a member still counts as "new".
    do_set_realm_property(
        user_profile.realm, "waiting_period_threshold", 100000, acting_user=None
    )
    check_move_message_to_stream(UserProfile.ROLE_MEMBER, error_msg)

    do_set_realm_property(user_profile.realm, "waiting_period_threshold", 0, acting_user=None)
    check_move_message_to_stream(UserProfile.ROLE_MEMBER)

    (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
        "othello", "old_stream_4", "new_stream_4", "test"
    )

    # Check when stream_post_policy is STREAM_POST_POLICY_EVERYONE.
    # In this case also, guest is not allowed as we do not allow guest to move
    # messages between streams in any case, so stream_post_policy of new stream does
    # not matter.
    do_change_stream_post_policy(
        new_stream, Stream.STREAM_POST_POLICY_EVERYONE, acting_user=user_profile
    )
    do_set_realm_property(
        user_profile.realm, "waiting_period_threshold", 100000, acting_user=None
    )
    check_move_message_to_stream(
        UserProfile.ROLE_GUEST, "You don't have permission to move this message"
    )
    check_move_message_to_stream(UserProfile.ROLE_MEMBER)
def test_move_message_to_stream_with_topic_editing_not_allowed(self) -> None:
    """When edit_topic_policy restricts topic edits to admins, a member
    cannot move a topic while renaming it, but can still move the topic
    to another stream with its name unchanged."""
    (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
        "othello", "old_stream_1", "new_stream_1", "test"
    )

    realm = user_profile.realm
    # Only admins may edit topics; the acting user below is not one.
    realm.edit_topic_policy = Realm.POLICY_ADMINS_ONLY
    realm.save()
    self.login("cordelia")

    # Stream moves themselves are allowed for members.
    do_set_realm_property(
        user_profile.realm,
        "move_messages_between_streams_policy",
        Realm.POLICY_MEMBERS_ONLY,
        acting_user=None,
    )

    # Moving + renaming the topic in one request is rejected, because the
    # rename part requires topic-edit permission.
    result = self.client_patch(
        "/json/messages/" + str(msg_id),
        {
            "stream_id": new_stream.id,
            "propagate_mode": "change_all",
            "topic": "new topic",
        },
    )
    self.assert_json_error(result, "You don't have permission to edit this message")

    # Moving without touching the topic name succeeds.
    result = self.client_patch(
        f"/json/messages/{msg_id}",
        {
            "stream_id": new_stream.id,
            "propagate_mode": "change_all",
        },
    )
    self.assert_json_success(result)
    messages = get_topic_messages(user_profile, old_stream, "test")
    self.assert_length(messages, 1)
    messages = get_topic_messages(user_profile, new_stream, "test")
    self.assert_length(messages, 4)
def test_move_message_to_stream_and_topic(self) -> None:
    """Move a topic to a new stream and rename it in a single request,
    pinning the SQL query and cache-trip counts of the operation."""
    (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
        "iago", "test move stream", "new stream", "test"
    )

    with queries_captured() as queries, cache_tries_captured() as cache_tries:
        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "stream_id": new_stream.id,
                "propagate_mode": "change_all",
                "topic": "new topic",
            },
        )
    # Pinned counts: a regression here means the move got more expensive.
    self.assert_length(queries, 53)
    self.assert_length(cache_tries, 13)

    # The old topic keeps only the breadcrumb, which names the new topic.
    messages = get_topic_messages(user_profile, old_stream, "test")
    self.assert_length(messages, 1)
    self.assertEqual(
        messages[0].content,
        f"This topic was moved to #**new stream>new topic** by @_**Iago|{user_profile.id}**.",
    )

    # The renamed destination topic has the moved messages + breadcrumb.
    messages = get_topic_messages(user_profile, new_stream, "new topic")
    self.assert_length(messages, 4)
    self.assertEqual(
        messages[3].content,
        f"This topic was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**.",
    )
    self.assert_json_success(result)
def test_inaccessible_msg_after_stream_change(self) -> None:
|
|
|
|
"""Simulates the case where message is moved to a stream where user is not a subscribed"""
|
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_lt) = self.prepare_move_topics(
|
2021-02-12 08:19:30 +01:00
|
|
|
"iago", "test move stream", "new stream", "test"
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
guest_user = self.example_user("polonius")
|
|
|
|
non_guest_user = self.example_user("hamlet")
|
2020-07-03 11:09:37 +02:00
|
|
|
self.subscribe(guest_user, old_stream.name)
|
|
|
|
self.subscribe(non_guest_user, old_stream.name)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
msg_id_to_test_acesss = self.send_stream_message(
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile, old_stream.name, topic_name="test", content="fourth"
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
2021-05-12 23:07:07 +02:00
|
|
|
has_message_access(
|
|
|
|
guest_user, Message.objects.get(id=msg_id_to_test_acesss), has_user_message=False
|
|
|
|
),
|
2021-02-12 08:19:30 +01:00
|
|
|
True,
|
|
|
|
)
|
2021-05-12 00:31:03 +02:00
|
|
|
self.assertEqual(
|
|
|
|
has_message_access(
|
2021-05-12 23:07:07 +02:00
|
|
|
guest_user,
|
|
|
|
Message.objects.get(id=msg_id_to_test_acesss),
|
|
|
|
has_user_message=False,
|
|
|
|
stream=old_stream,
|
2021-05-12 00:31:03 +02:00
|
|
|
),
|
|
|
|
True,
|
|
|
|
)
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
2021-05-12 23:07:07 +02:00
|
|
|
has_message_access(
|
|
|
|
non_guest_user,
|
|
|
|
Message.objects.get(id=msg_id_to_test_acesss),
|
|
|
|
has_user_message=False,
|
|
|
|
),
|
2021-02-12 08:19:30 +01:00
|
|
|
True,
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{msg_id}",
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
"topic": "new topic",
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_success(result)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
2021-05-12 23:07:07 +02:00
|
|
|
has_message_access(
|
|
|
|
guest_user,
|
|
|
|
Message.objects.get(id=msg_id_to_test_acesss),
|
|
|
|
has_user_message=False,
|
|
|
|
),
|
2021-02-12 08:19:30 +01:00
|
|
|
False,
|
|
|
|
)
|
|
|
|
self.assertEqual(
|
2021-05-12 23:07:07 +02:00
|
|
|
has_message_access(
|
|
|
|
non_guest_user,
|
|
|
|
Message.objects.get(id=msg_id_to_test_acesss),
|
|
|
|
has_user_message=False,
|
|
|
|
),
|
2021-02-12 08:19:30 +01:00
|
|
|
True,
|
|
|
|
)
|
2021-05-12 00:31:03 +02:00
|
|
|
self.assertEqual(
|
|
|
|
# If the guest user were subscribed to the new stream,
|
|
|
|
# they'd have access; has_message_access does not validate
|
|
|
|
# the is_subscribed parameter.
|
|
|
|
has_message_access(
|
|
|
|
guest_user,
|
|
|
|
Message.objects.get(id=msg_id_to_test_acesss),
|
2021-05-12 23:07:07 +02:00
|
|
|
has_user_message=False,
|
2021-05-12 00:31:03 +02:00
|
|
|
stream=new_stream,
|
|
|
|
is_subscribed=True,
|
|
|
|
),
|
|
|
|
True,
|
|
|
|
)
|
|
|
|
|
|
|
|
self.assertEqual(
|
|
|
|
has_message_access(
|
2021-05-12 23:07:07 +02:00
|
|
|
guest_user,
|
|
|
|
Message.objects.get(id=msg_id_to_test_acesss),
|
|
|
|
has_user_message=False,
|
|
|
|
stream=new_stream,
|
2021-05-12 00:31:03 +02:00
|
|
|
),
|
|
|
|
False,
|
|
|
|
)
|
|
|
|
with self.assertRaises(AssertionError):
|
|
|
|
# Raises assertion if you pass an invalid stream.
|
|
|
|
has_message_access(
|
2021-05-12 23:07:07 +02:00
|
|
|
guest_user,
|
|
|
|
Message.objects.get(id=msg_id_to_test_acesss),
|
|
|
|
has_user_message=False,
|
|
|
|
stream=old_stream,
|
2021-05-12 00:31:03 +02:00
|
|
|
)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
|
|
|
UserMessage.objects.filter(
|
|
|
|
user_profile_id=non_guest_user.id,
|
|
|
|
message_id=msg_id_to_test_acesss,
|
|
|
|
).count(),
|
|
|
|
0,
|
|
|
|
)
|
|
|
|
self.assertEqual(
|
|
|
|
has_message_access(
|
2021-05-12 23:07:07 +02:00
|
|
|
self.example_user("iago"),
|
|
|
|
Message.objects.get(id=msg_id_to_test_acesss),
|
|
|
|
has_user_message=False,
|
2021-02-12 08:19:30 +01:00
|
|
|
),
|
|
|
|
True,
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
def test_no_notify_move_message_to_stream(self) -> None:
|
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_lt) = self.prepare_move_topics(
|
2021-02-12 08:19:30 +01:00
|
|
|
"iago", "test move stream", "new stream", "test"
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{msg_id}",
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
"send_notification_to_old_thread": "false",
|
|
|
|
"send_notification_to_new_thread": "false",
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, old_stream, "test")
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(messages, 0)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, new_stream, "test")
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(messages, 3)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
def test_notify_new_thread_move_message_to_stream(self) -> None:
|
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_lt) = self.prepare_move_topics(
|
2021-02-12 08:19:30 +01:00
|
|
|
"iago", "test move stream", "new stream", "test"
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{msg_id}",
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
"send_notification_to_old_thread": "false",
|
|
|
|
"send_notification_to_new_thread": "true",
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, old_stream, "test")
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(messages, 0)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, new_stream, "test")
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(messages, 4)
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
|
|
|
messages[3].content,
|
2022-01-15 22:33:32 +01:00
|
|
|
f"This topic was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**.",
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
def test_notify_old_thread_move_message_to_stream(self) -> None:
|
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_lt) = self.prepare_move_topics(
|
2021-02-12 08:19:30 +01:00
|
|
|
"iago", "test move stream", "new stream", "test"
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{msg_id}",
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
"send_notification_to_old_thread": "true",
|
|
|
|
"send_notification_to_new_thread": "false",
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, old_stream, "test")
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(messages, 1)
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
|
|
|
messages[0].content,
|
2022-01-15 22:33:32 +01:00
|
|
|
f"This topic was moved to #**new stream>test** by @_**Iago|{user_profile.id}**.",
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, new_stream, "test")
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(messages, 3)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2020-09-03 20:57:49 +02:00
|
|
|
    def parameterized_test_move_message_involving_private_stream(
        self,
        from_invite_only: bool,
        history_public_to_subscribers: bool,
        user_messages_created: bool,
        to_invite_only: bool = True,
    ) -> None:
        """Shared driver for the move-to/from-private-stream tests.

        Moves a topic from a stream (private iff from_invite_only) to a new
        stream (private iff to_invite_only, history shared iff
        history_public_to_subscribers), and checks UserMessage bookkeeping:
        the subscriber who loses access has their rows deleted, and the
        subscriber gaining access gets rows iff user_messages_created.
        """
        admin_user = self.example_user("iago")
        user_losing_access = self.example_user("cordelia")
        user_gaining_access = self.example_user("hamlet")

        self.login("iago")
        old_stream = self.make_stream("test move stream", invite_only=from_invite_only)
        new_stream = self.make_stream(
            "new stream",
            invite_only=to_invite_only,
            history_public_to_subscribers=history_public_to_subscribers,
        )

        # Cordelia is only in the source stream; Hamlet only in the
        # destination; the admin is in both and performs the move.
        self.subscribe(admin_user, old_stream.name)
        self.subscribe(user_losing_access, old_stream.name)

        self.subscribe(admin_user, new_stream.name)
        self.subscribe(user_gaining_access, new_stream.name)

        msg_id = self.send_stream_message(
            admin_user, old_stream.name, topic_name="test", content="First"
        )
        self.send_stream_message(admin_user, old_stream.name, topic_name="test", content="Second")

        # Sanity-check the pre-move UserMessage rows.
        self.assertEqual(
            UserMessage.objects.filter(
                user_profile_id=user_losing_access.id,
                message_id=msg_id,
            ).count(),
            1,
        )
        self.assertEqual(
            UserMessage.objects.filter(
                user_profile_id=user_gaining_access.id,
                message_id=msg_id,
            ).count(),
            0,
        )

        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "stream_id": new_stream.id,
                "propagate_mode": "change_all",
            },
        )
        self.assert_json_success(result)

        # The source topic retains only the "moved" breadcrumb.
        messages = get_topic_messages(admin_user, old_stream, "test")
        self.assert_length(messages, 1)
        self.assertEqual(
            messages[0].content,
            f"This topic was moved to #**new stream>test** by @_**Iago|{admin_user.id}**.",
        )

        messages = get_topic_messages(admin_user, new_stream, "test")
        self.assert_length(messages, 3)

        # The user who lost access must have their UserMessage row removed.
        self.assertEqual(
            UserMessage.objects.filter(
                user_profile_id=user_losing_access.id,
                message_id=msg_id,
            ).count(),
            0,
        )
        # When the history is shared, UserMessage is not created for the user
        # but the user can see the message.
        self.assertEqual(
            UserMessage.objects.filter(
                user_profile_id=user_gaining_access.id,
                message_id=msg_id,
            ).count(),
            1 if user_messages_created else 0,
        )
2020-09-03 20:57:49 +02:00
|
|
|
|
|
|
|
def test_move_message_from_public_to_private_stream_not_shared_history(self) -> None:
|
|
|
|
self.parameterized_test_move_message_involving_private_stream(
|
|
|
|
from_invite_only=False,
|
|
|
|
history_public_to_subscribers=False,
|
2020-09-28 08:43:07 +02:00
|
|
|
user_messages_created=True,
|
2020-09-03 20:57:49 +02:00
|
|
|
)
|
2020-07-04 03:20:40 +02:00
|
|
|
|
2020-09-03 20:57:49 +02:00
|
|
|
def test_move_message_from_public_to_private_stream_shared_history(self) -> None:
|
|
|
|
self.parameterized_test_move_message_involving_private_stream(
|
|
|
|
from_invite_only=False,
|
|
|
|
history_public_to_subscribers=True,
|
2020-09-28 08:43:07 +02:00
|
|
|
user_messages_created=False,
|
2020-09-03 20:57:49 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
def test_move_message_from_private_to_private_stream_not_shared_history(self) -> None:
|
|
|
|
self.parameterized_test_move_message_involving_private_stream(
|
|
|
|
from_invite_only=True,
|
|
|
|
history_public_to_subscribers=False,
|
2020-09-28 08:43:07 +02:00
|
|
|
user_messages_created=True,
|
2020-09-03 20:57:49 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
def test_move_message_from_private_to_private_stream_shared_history(self) -> None:
|
|
|
|
self.parameterized_test_move_message_involving_private_stream(
|
|
|
|
from_invite_only=True,
|
|
|
|
history_public_to_subscribers=True,
|
2020-09-28 08:43:07 +02:00
|
|
|
user_messages_created=False,
|
2020-09-03 20:57:49 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
def test_move_message_from_private_to_public(self) -> None:
|
|
|
|
self.parameterized_test_move_message_involving_private_stream(
|
|
|
|
from_invite_only=True,
|
|
|
|
history_public_to_subscribers=True,
|
2020-09-28 08:43:07 +02:00
|
|
|
user_messages_created=False,
|
2020-09-03 20:57:49 +02:00
|
|
|
to_invite_only=False,
|
|
|
|
)
|
2020-07-04 03:20:40 +02:00
|
|
|
|
2021-04-08 19:33:22 +02:00
|
|
|
def test_can_move_messages_between_streams(self) -> None:
|
|
|
|
def validation_func(user_profile: UserProfile) -> bool:
|
|
|
|
user_profile.refresh_from_db()
|
|
|
|
return user_profile.can_move_messages_between_streams()
|
|
|
|
|
|
|
|
self.check_has_permission_policies("move_messages_between_streams_policy", validation_func)
|
|
|
|
|
2021-06-11 20:50:03 +02:00
|
|
|
    def test_mark_topic_as_resolved(self) -> None:
        """Resolving a topic renames it with RESOLVED_TOPIC_PREFIX and posts
        a notification message; unresolving reverses the rename.  The
        notification messages should be unread only for topic participants
        (sender, resolver, and reactors), and the notification text uses the
        realm's default language, not the acting user's.
        """
        self.login("iago")
        admin_user = self.example_user("iago")
        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        aaron = self.example_user("aaron")

        # Set the user's translation language to German to test that
        # it is overridden by the realm's default language.
        admin_user.default_language = "de"
        admin_user.save()
        stream = self.make_stream("new")
        self.subscribe(admin_user, stream.name)
        self.subscribe(hamlet, stream.name)
        self.subscribe(cordelia, stream.name)
        self.subscribe(aaron, stream.name)

        original_topic = "topic 1"
        id1 = self.send_stream_message(hamlet, "new", topic_name=original_topic)
        id2 = self.send_stream_message(admin_user, "new", topic_name=original_topic)

        # Aaron reacts to hamlet's message, making him a topic participant.
        msg1 = Message.objects.get(id=id1)
        do_add_reaction(aaron, msg1, "tada", "1f389", "unicode_emoji")

        # Check that we don't incorrectly send "unresolve topic"
        # notifications when asking to preserve the current topic.
        result = self.client_patch(
            "/json/messages/" + str(id1),
            {
                "topic": original_topic,
                "propagate_mode": "change_all",
            },
        )
        self.assert_json_error(result, "Nothing to change")

        # Resolve the topic; the request uses German as the Accept-Language
        # to confirm the notification is not localized per-user.
        resolved_topic = RESOLVED_TOPIC_PREFIX + original_topic
        result = self.client_patch(
            "/json/messages/" + str(id1),
            {
                "topic": resolved_topic,
                "propagate_mode": "change_all",
            },
            HTTP_ACCEPT_LANGUAGE="de",
        )

        self.assert_json_success(result)
        for msg_id in [id1, id2]:
            msg = Message.objects.get(id=msg_id)
            self.assertEqual(
                resolved_topic,
                msg.topic_name(),
            )

        # Both original messages plus the "resolved" notification.
        messages = get_topic_messages(admin_user, stream, resolved_topic)
        self.assert_length(messages, 3)
        self.assertEqual(
            messages[2].content,
            f"@_**Iago|{admin_user.id}** has marked this topic as resolved.",
        )

        # Check topic resolved notification message is only unread for participants.
        assert (
            UserMessage.objects.filter(
                user_profile__in=[admin_user, hamlet, aaron], message__id=messages[2].id
            )
            .extra(where=[UserMessage.where_unread()])
            .count()
            == 3
        )

        assert (
            UserMessage.objects.filter(user_profile=cordelia, message__id=messages[2].id)
            .extra(where=[UserMessage.where_unread()])
            .count()
            == 0
        )

        # Now move to a weird state and confirm no new messages
        weird_topic = "✔ ✔✔" + original_topic
        result = self.client_patch(
            "/json/messages/" + str(id1),
            {
                "topic": weird_topic,
                "propagate_mode": "change_all",
            },
        )

        self.assert_json_success(result)
        for msg_id in [id1, id2]:
            msg = Message.objects.get(id=msg_id)
            self.assertEqual(
                weird_topic,
                msg.topic_name(),
            )

        # Still 3 messages: the rename to another checkmark-prefixed topic
        # did not generate an extra notification.
        messages = get_topic_messages(admin_user, stream, weird_topic)
        self.assert_length(messages, 3)
        self.assertEqual(
            messages[2].content,
            f"@_**Iago|{admin_user.id}** has marked this topic as resolved.",
        )

        # Unresolve by renaming back to the original topic.
        unresolved_topic = original_topic
        result = self.client_patch(
            "/json/messages/" + str(id1),
            {
                "topic": unresolved_topic,
                "propagate_mode": "change_all",
            },
        )

        self.assert_json_success(result)
        for msg_id in [id1, id2]:
            msg = Message.objects.get(id=msg_id)
            self.assertEqual(
                unresolved_topic,
                msg.topic_name(),
            )

        messages = get_topic_messages(admin_user, stream, unresolved_topic)
        self.assert_length(messages, 4)
        self.assertEqual(
            messages[3].content,
            f"@_**Iago|{admin_user.id}** has marked this topic as unresolved.",
        )

        # Check topic unresolved notification message is only unread for participants.
        assert (
            UserMessage.objects.filter(
                user_profile__in=[admin_user, hamlet, aaron], message__id=messages[3].id
            )
            .extra(where=[UserMessage.where_unread()])
            .count()
            == 3
        )

        assert (
            UserMessage.objects.filter(user_profile=cordelia, message__id=messages[3].id)
            .extra(where=[UserMessage.where_unread()])
            .count()
            == 0
        )
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-07-04 03:20:40 +02:00
|
|
|
class DeleteMessageTest(ZulipTestCase):
|
|
|
|
def test_delete_message_invalid_request_format(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("iago")
|
|
|
|
hamlet = self.example_user("hamlet")
|
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
|
|
|
msg_id = self.send_stream_message(hamlet, "Denmark")
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_delete(f"/json/messages/{msg_id + 1}", {"message_id": msg_id})
|
2020-07-04 03:20:40 +02:00
|
|
|
self.assert_json_error(result, "Invalid message(s)")
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_delete(f"/json/messages/{msg_id}")
|
2020-07-04 03:20:40 +02:00
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
def test_delete_message_by_user(self) -> None:
    """
    End-to-end checks of the message-deletion permission and time-limit
    logic via the DELETE /json/messages/{id} endpoint, covering:
    admins-only policy, everyone policy with and without a time limit,
    repeated deletion of the same message, and racing delete requests.
    """

    def set_message_deleting_params(
        delete_own_message_policy: int, message_content_delete_limit_seconds: Union[int, str]
    ) -> None:
        # Configure the realm's deletion policy as Iago (an administrator),
        # since changing realm settings requires admin rights.
        self.login("iago")
        result = self.client_patch(
            "/json/realm",
            {
                "delete_own_message_policy": delete_own_message_policy,
                "message_content_delete_limit_seconds": orjson.dumps(
                    message_content_delete_limit_seconds
                ).decode(),
            },
        )
        self.assert_json_success(result)

    # The three helpers below log in as a fixed user and attempt to delete
    # the given message, returning the raw response for the caller to assert on.
    def test_delete_message_by_admin(msg_id: int) -> "TestHttpResponse":
        self.login("iago")
        result = self.client_delete(f"/json/messages/{msg_id}")
        return result

    def test_delete_message_by_owner(msg_id: int) -> "TestHttpResponse":
        self.login("hamlet")
        result = self.client_delete(f"/json/messages/{msg_id}")
        return result

    def test_delete_message_by_other_user(msg_id: int) -> "TestHttpResponse":
        self.login("cordelia")
        result = self.client_delete(f"/json/messages/{msg_id}")
        return result

    # Test if message deleting is not allowed (the default): only admins
    # may delete; both the sender and an unrelated user are rejected.
    set_message_deleting_params(Realm.POLICY_ADMINS_ONLY, "unlimited")
    hamlet = self.example_user("hamlet")
    self.login_user(hamlet)
    msg_id = self.send_stream_message(hamlet, "Denmark")

    result = test_delete_message_by_owner(msg_id=msg_id)
    self.assert_json_error(result, "You don't have permission to delete this message")

    result = test_delete_message_by_other_user(msg_id=msg_id)
    self.assert_json_error(result, "You don't have permission to delete this message")

    result = test_delete_message_by_admin(msg_id=msg_id)
    self.assert_json_success(result)

    # Test if message deleting is allowed.
    # Test if time limit is "unlimited" (no limit): the sender may delete
    # even an old message, but other non-admin users still may not.
    set_message_deleting_params(Realm.POLICY_EVERYONE, "unlimited")
    msg_id = self.send_stream_message(hamlet, "Denmark")
    message = Message.objects.get(id=msg_id)
    # Back-date the message so it would be outside any finite time limit.
    message.date_sent = message.date_sent - datetime.timedelta(seconds=600)
    message.save()

    result = test_delete_message_by_other_user(msg_id=msg_id)
    self.assert_json_error(result, "You don't have permission to delete this message")

    result = test_delete_message_by_owner(msg_id=msg_id)
    self.assert_json_success(result)

    # Test if time limit is non-zero (240s): a 120s-old message is still
    # deletable by its sender, a 360s-old one is not.
    set_message_deleting_params(Realm.POLICY_EVERYONE, 240)
    msg_id_1 = self.send_stream_message(hamlet, "Denmark")
    message = Message.objects.get(id=msg_id_1)
    message.date_sent = message.date_sent - datetime.timedelta(seconds=120)
    message.save()

    msg_id_2 = self.send_stream_message(hamlet, "Denmark")
    message = Message.objects.get(id=msg_id_2)
    message.date_sent = message.date_sent - datetime.timedelta(seconds=360)
    message.save()

    result = test_delete_message_by_other_user(msg_id=msg_id_1)
    self.assert_json_error(result, "You don't have permission to delete this message")

    result = test_delete_message_by_owner(msg_id=msg_id_1)
    self.assert_json_success(result)
    result = test_delete_message_by_owner(msg_id=msg_id_2)
    self.assert_json_error(result, "The time limit for deleting this message has passed")

    # No limit for admin: the admin can delete the expired message.
    result = test_delete_message_by_admin(msg_id=msg_id_2)
    self.assert_json_success(result)

    # Test multiple delete requests with no latency issues: the second
    # delete of an already-deleted message fails message lookup.
    msg_id = self.send_stream_message(hamlet, "Denmark")
    result = test_delete_message_by_owner(msg_id=msg_id)
    self.assert_json_success(result)
    result = test_delete_message_by_owner(msg_id=msg_id)
    self.assert_json_error(result, "Invalid message(s)")

    # Test handling of 500 error caused by multiple delete requests due to latency.
    # See issue #11219. Permission/access checks are mocked out so the
    # request reaches do_delete_messages, whose failure modes we simulate.
    with mock.patch("zerver.views.message_edit.do_delete_messages") as m, mock.patch(
        "zerver.views.message_edit.validate_can_delete_message", return_value=None
    ), mock.patch("zerver.views.message_edit.access_message", return_value=(None, None)):
        # A concurrent delete can surface as either an IntegrityError or
        # Message.DoesNotExist; both should map to a clean client error.
        m.side_effect = IntegrityError()
        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_error(result, "Message already deleted")
        m.side_effect = Message.DoesNotExist()
        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_error(result, "Message already deleted")
|
2021-06-03 15:46:13 +02:00
|
|
|
|
2021-06-21 18:52:51 +02:00
|
|
|
def test_delete_message_according_to_delete_own_message_policy(self) -> None:
    """
    Verify, for every delete_own_message_policy value, which senders are
    allowed to delete their own just-sent message.
    """
    no_permission_error = "You don't have permission to delete this message"

    def check_delete_message_by_sender(
        sender_name: str, error_msg: Optional[str] = None
    ) -> None:
        # Send a fresh message as `sender_name`, then attempt to delete it
        # while logged in as that same user; expect success unless an
        # error message is supplied.
        sender = self.example_user(sender_name)
        message_id = self.send_stream_message(sender, "Verona")
        self.login_user(sender)
        response = self.client_delete(f"/json/messages/{message_id}")
        if error_msg is not None:
            self.assert_json_error(response, error_msg)
        else:
            self.assert_json_success(response)

    realm = get_realm("zulip")

    # Admins only: shiva is rejected, iago succeeds.
    do_set_realm_property(
        realm, "delete_own_message_policy", Realm.POLICY_ADMINS_ONLY, acting_user=None
    )
    check_delete_message_by_sender("shiva", no_permission_error)
    check_delete_message_by_sender("iago")

    # Moderators only: cordelia is rejected, shiva succeeds.
    do_set_realm_property(
        realm, "delete_own_message_policy", Realm.POLICY_MODERATORS_ONLY, acting_user=None
    )
    check_delete_message_by_sender("cordelia", no_permission_error)
    check_delete_message_by_sender("shiva")

    # Members only: polonius is rejected, cordelia succeeds.
    do_set_realm_property(
        realm, "delete_own_message_policy", Realm.POLICY_MEMBERS_ONLY, acting_user=None
    )
    check_delete_message_by_sender("polonius", no_permission_error)
    check_delete_message_by_sender("cordelia")

    # Full members only: cordelia is rejected while her tenure is below the
    # waiting-period threshold, and succeeds once she is past it.
    do_set_realm_property(
        realm, "delete_own_message_policy", Realm.POLICY_FULL_MEMBERS_ONLY, acting_user=None
    )
    do_set_realm_property(realm, "waiting_period_threshold", 10, acting_user=None)
    cordelia = self.example_user("cordelia")
    cordelia.date_joined = timezone_now() - datetime.timedelta(days=9)
    cordelia.save()
    check_delete_message_by_sender("cordelia", no_permission_error)
    cordelia.date_joined = timezone_now() - datetime.timedelta(days=11)
    cordelia.save()
    check_delete_message_by_sender("cordelia")

    # Everyone: both cordelia and polonius succeed.
    do_set_realm_property(
        realm, "delete_own_message_policy", Realm.POLICY_EVERYONE, acting_user=None
    )
    check_delete_message_by_sender("cordelia")
    check_delete_message_by_sender("polonius")
|
|
|
|
|
2021-06-03 15:46:13 +02:00
|
|
|
def test_delete_event_sent_after_transaction_commits(self) -> None:
    """
    Tests that `send_event` is hooked to `transaction.on_commit`. This is important, because
    we don't want to end up holding locks on message rows for too long if the event queue runs
    into a problem.
    """
    hamlet = self.example_user("hamlet")
    self.send_stream_message(hamlet, "Denmark")
    message = self.get_last_message()

    with self.tornado_redirected_to_list([], expected_num_events=1):
        with mock.patch("zerver.actions.message_edit.send_event") as m:
            # The mocked send_event raises if invoked; since the mock is
            # only active inside this inner `with` block, the test passes
            # only when the real event is dispatched later (after commit),
            # outside the mocked region — presumably via the on_commit
            # hook; confirm against zerver.actions.message_edit.
            m.side_effect = AssertionError(
                "Events should be sent only after the transaction commits."
            )
            do_delete_messages(hamlet.realm, [message])
|