import datetime
from operator import itemgetter
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
from unittest import mock

import orjson
from django.db import IntegrityError
from django.utils.timezone import now as timezone_now

from zerver.actions.message_delete import do_delete_messages
from zerver.actions.message_edit import (
    check_update_message,
    do_update_message,
    get_mentions_for_message_updates,
)
from zerver.actions.reactions import do_add_reaction
from zerver.actions.realm_settings import do_change_realm_plan_type, do_set_realm_property
from zerver.actions.streams import do_change_stream_post_policy, do_deactivate_stream
from zerver.actions.user_groups import add_subgroups_to_user_group, check_add_user_group
from zerver.actions.user_topics import do_set_user_topic_visibility_policy
from zerver.actions.users import do_change_user_role
from zerver.lib.message import MessageDict, has_message_access, messages_for_ids, truncate_topic
from zerver.lib.test_classes import ZulipTestCase, get_topic_messages
from zerver.lib.test_helpers import cache_tries_captured, queries_captured
from zerver.lib.topic import RESOLVED_TOPIC_PREFIX, TOPIC_NAME
from zerver.lib.url_encoding import near_stream_message_url
from zerver.lib.user_topics import (
    get_users_with_user_topic_visibility_policy,
    set_topic_visibility_policy,
    topic_has_visibility_policy,
)
from zerver.lib.utils import assert_is_not_none
from zerver.models import (
    MAX_TOPIC_NAME_LENGTH,
    Message,
    Realm,
    Stream,
    UserGroup,
    UserMessage,
    UserProfile,
    UserTopic,
    get_realm,
    get_stream,
)

if TYPE_CHECKING:
    from django.test.client import _MonkeyPatchedWSGIResponse as TestHttpResponse
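

# Shared helpers for the message-edit tests below.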
class EditMessageTestCase(ZulipTestCase):
    def check_topic(self, msg_id: int, topic_name: str) -> None:
        msg = Message.objects.get(id=msg_id)
        self.assertEqual(msg.topic_name(), topic_name)

    def check_message(self, msg_id: int, topic_name: str, content: str) -> None:
        # Make sure we saved the message correctly to the DB.
        msg = Message.objects.get(id=msg_id)
        self.assertEqual(msg.topic_name(), topic_name)
        self.assertEqual(msg.content, content)

        """
        We assume our caller just edited a message.

        Next, we will make sure we properly cached the messages. We still have
        to do a query to hydrate recipient info, but we won't need to hit the
        zerver_message table.
        """

        with queries_captured(keep_cache_warm=True) as queries:
            (fetch_message_dict,) = messages_for_ids(
                message_ids=[msg.id],
                user_message_flags={msg_id: []},
                search_fields={},
                apply_markdown=False,
                client_gravatar=False,
                allow_edit_history=True,
            )

        self.assert_length(queries, 1)
        for query in queries:
            self.assertNotIn("message", query.sql)

        self.assertEqual(
            fetch_message_dict[TOPIC_NAME],
            msg.topic_name(),
        )
        self.assertEqual(
            fetch_message_dict["content"],
            msg.content,
        )
        self.assertEqual(
            fetch_message_dict["sender_id"],
            msg.sender_id,
        )

        if msg.edit_history:
            self.assertEqual(
                fetch_message_dict["edit_history"],
                orjson.loads(msg.edit_history),
            )
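
    # Helper: create two streams, subscribe `user_email` to both, and post
    # three messages to `topic` on the old stream; returns the user profile,
    # both streams, and the ids of the first two messages.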
    def prepare_move_topics(
        self,
        user_email: str,
        old_stream: str,
        new_stream: str,
        topic: str,
        language: Optional[str] = None,
    ) -> Tuple[UserProfile, Stream, Stream, int, int]:
        user_profile = self.example_user(user_email)
        if language is not None:
            user_profile.default_language = language
            user_profile.save(update_fields=["default_language"])

        self.login(user_email)
        stream = self.make_stream(old_stream)
        stream_to = self.make_stream(new_stream)
        self.subscribe(user_profile, stream.name)
        self.subscribe(user_profile, stream_to.name)
        msg_id = self.send_stream_message(
            user_profile, stream.name, topic_name=topic, content="First"
        )
        msg_id_lt = self.send_stream_message(
            user_profile, stream.name, topic_name=topic, content="Second"
        )

        self.send_stream_message(user_profile, stream.name, topic_name=topic, content="third")

        return (user_profile, stream, stream_to, msg_id, msg_id_lt)
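

# Tests for validation of the payload of message-edit requests.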
class EditMessagePayloadTest(EditMessageTestCase):
    def test_edit_message_no_changes(self) -> None:
        self.login("hamlet")
        msg_id = self.send_stream_message(
            self.example_user("hamlet"), "Denmark", topic_name="editing", content="before edit"
        )
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {},
        )
        self.assert_json_error(result, "Nothing to change")

    def test_move_message_cant_move_private_message(self) -> None:
        hamlet = self.example_user("hamlet")
        self.login("hamlet")
        cordelia = self.example_user("cordelia")
        msg_id = self.send_personal_message(hamlet, cordelia)

        verona = get_stream("Verona", hamlet.realm)

        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "stream_id": verona.id,
            },
        )

        self.assert_json_error(result, "Direct messages cannot be moved to streams.")

    def test_private_message_edit_topic(self) -> None:
        hamlet = self.example_user("hamlet")
        self.login("hamlet")
        cordelia = self.example_user("cordelia")
        msg_id = self.send_personal_message(hamlet, cordelia)

        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "topic": "Should not exist",
            },
        )

        self.assert_json_error(result, "Direct messages cannot have topics.")

    def test_propagate_invalid(self) -> None:
        self.login("hamlet")
        id1 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic1")

        result = self.client_patch(
            "/json/messages/" + str(id1),
            {
                "topic": "edited",
                "propagate_mode": "invalid",
            },
        )
        self.assert_json_error(result, "Invalid propagate_mode")
        self.check_topic(id1, topic_name="topic1")

        result = self.client_patch(
            "/json/messages/" + str(id1),
            {
                "content": "edited",
                "propagate_mode": "change_all",
            },
        )
        self.assert_json_error(result, "Invalid propagate_mode without topic edit")
        self.check_topic(id1, topic_name="topic1")

    def test_edit_message_no_topic(self) -> None:
        self.login("hamlet")
        msg_id = self.send_stream_message(
            self.example_user("hamlet"), "Denmark", topic_name="editing", content="before edit"
        )
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "topic": " ",
            },
        )
        self.assert_json_error(result, "Topic can't be empty!")

    def test_edit_message_invalid_topic(self) -> None:
        self.login("hamlet")
        msg_id = self.send_stream_message(
            self.example_user("hamlet"), "Denmark", topic_name="editing", content="before edit"
        )
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "topic": "editing\nfun",
            },
        )
        self.assert_json_error(result, "Invalid character in topic, at position 8!")

    def test_move_message_to_stream_with_content(self) -> None:
        (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
            "iago", "test move stream", "new stream", "test"
        )

        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "stream_id": new_stream.id,
                "propagate_mode": "change_all",
                "content": "Not allowed",
            },
        )
        self.assert_json_error(result, "Cannot change message content while changing stream")

        messages = get_topic_messages(user_profile, old_stream, "test")
        self.assert_length(messages, 3)

        messages = get_topic_messages(user_profile, new_stream, "test")
        self.assert_length(messages, 0)

    # Right now, we prevent users from editing widgets.
    def test_edit_submessage(self) -> None:
        self.login("hamlet")
        msg_id = self.send_stream_message(
            self.example_user("hamlet"),
            "Denmark",
            topic_name="editing",
            content="/poll Games?\nYES\nNO",
        )
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": "/poll Games?\nYES\nNO\nMaybe",
            },
        )
        self.assert_json_error(result, "Widgets cannot be edited.")
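

# Tests for editing and fetching individual messages, including permissions
# and edit history.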
class EditMessageTest(EditMessageTestCase):
    def test_query_count_on_to_dict_uncached(self) -> None:
        # The `to_dict_uncached` method is used by the mechanisms
        # tested in this class. Hence, its performance is tested here.
        # Generate three messages from different senders.
        user = self.example_user("hamlet")
        realm = user.realm
        self.login_user(user)
        stream_name = "public_stream"
        self.subscribe(user, stream_name)
        message_ids = []
        message_ids.append(self.send_stream_message(user, stream_name, "Message one"))
        user_2 = self.example_user("cordelia")
        self.subscribe(user_2, stream_name)
        message_ids.append(self.send_stream_message(user_2, stream_name, "Message two"))
        self.subscribe(self.notification_bot(realm), stream_name)
        message_ids.append(
            self.send_stream_message(self.notification_bot(realm), stream_name, "Message three")
        )
        messages = [
            Message.objects.select_related().get(id=message_id) for message_id in message_ids
        ]

        # Check number of queries performed
        # 1 query for realm_id per message = 3
        # 1 query each for reactions & submessage for all messages = 2
        with self.assert_database_query_count(5):
            MessageDict.to_dict_uncached(messages)

        realm_id = 2  # Fetched from stream object
        # Check number of queries performed with realm_id
        # 1 query each for reactions & submessage for all messages = 2
        with self.assert_database_query_count(2):
            MessageDict.to_dict_uncached(messages, realm_id)

    def test_save_message(self) -> None:
        """This is also tested by a client test, but here we can verify
        the cache against the database"""
        self.login("hamlet")
        msg_id = self.send_stream_message(
            self.example_user("hamlet"), "Denmark", topic_name="editing", content="before edit"
        )
        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "content": "after edit",
            },
        )
        self.assert_json_success(result)
        self.check_message(msg_id, topic_name="editing", content="after edit")

        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "topic": "edited",
            },
        )
        self.assert_json_success(result)
        self.check_topic(msg_id, topic_name="edited")

    def test_fetch_message_from_id(self) -> None:
        self.login("hamlet")
        msg_id = self.send_personal_message(
            from_user=self.example_user("hamlet"),
            to_user=self.example_user("cordelia"),
            content="Personal message",
        )
        result = self.client_get("/json/messages/" + str(msg_id))
        response_dict = self.assert_json_success(result)
        self.assertEqual(response_dict["raw_content"], "Personal message")
        self.assertEqual(response_dict["message"]["id"], msg_id)
        self.assertEqual(response_dict["message"]["flags"], [])

        # Send message to web-public stream where hamlet is not subscribed.
        # This tests the case of a user having no `UserMessage` row but
        # having access to the message.
        web_public_stream = self.make_stream("web-public-stream", is_web_public=True)
        self.subscribe(self.example_user("cordelia"), web_public_stream.name)
        web_public_stream_msg_id = self.send_stream_message(
            self.example_user("cordelia"), web_public_stream.name, content="web-public message"
        )
        result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
        response_dict = self.assert_json_success(result)
        self.assertEqual(response_dict["raw_content"], "web-public message")
        self.assertEqual(response_dict["message"]["id"], web_public_stream_msg_id)
        self.assertEqual(response_dict["message"]["flags"], ["read", "historical"])

        # Spectator should be able to fetch message in web-public stream.
        self.logout()
        result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
        response_dict = self.assert_json_success(result)
        self.assertEqual(response_dict["raw_content"], "web-public message")
        self.assertEqual(response_dict["message"]["id"], web_public_stream_msg_id)

        # Verify default is apply_markdown=True
        self.assertEqual(response_dict["message"]["content"], "<p>web-public message</p>")

        # Verify apply_markdown=False works correctly.
        result = self.client_get(
            "/json/messages/" + str(web_public_stream_msg_id), {"apply_markdown": "false"}
        )
        response_dict = self.assert_json_success(result)
        self.assertEqual(response_dict["raw_content"], "web-public message")
        self.assertEqual(response_dict["message"]["content"], "web-public message")

        with self.settings(WEB_PUBLIC_STREAMS_ENABLED=False):
            result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
            self.assert_json_error(
                result,
                "Not logged in: API authentication or user session required",
                status_code=401,
            )

        # Test error cases
        self.login("hamlet")
        result = self.client_get("/json/messages/999999")
        self.assert_json_error(result, "Invalid message(s)")

        self.login("cordelia")
        result = self.client_get(f"/json/messages/{msg_id}")
        self.assert_json_success(result)

        self.login("othello")
        result = self.client_get(f"/json/messages/{msg_id}")
        self.assert_json_error(result, "Invalid message(s)")
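
    # Build one web-public stream, one regular stream, and a direct message,
    # then check which of them a logged-out spectator can fetch under various
    # realm settings and plan types.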
    def test_fetch_raw_message_spectator(self) -> None:
        user_profile = self.example_user("iago")
        self.login("iago")
        web_public_stream = self.make_stream("web-public-stream", is_web_public=True)
        self.subscribe(user_profile, web_public_stream.name)

        web_public_stream_msg_id = self.send_stream_message(
            user_profile, web_public_stream.name, content="web-public message"
        )

        non_web_public_stream = self.make_stream("non-web-public-stream")
        self.subscribe(user_profile, non_web_public_stream.name)
        non_web_public_stream_msg_id = self.send_stream_message(
            user_profile, non_web_public_stream.name, content="non-web-public message"
        )

        # Generate a direct message to use in verification.
        private_message_id = self.send_personal_message(user_profile, user_profile)

        invalid_message_id = private_message_id + 1000

        self.logout()

        # Confirm WEB_PUBLIC_STREAMS_ENABLED is enforced.
        with self.settings(WEB_PUBLIC_STREAMS_ENABLED=False):
            result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
            self.assert_json_error(
                result, "Not logged in: API authentication or user session required", 401
            )

        # Verify the enable_spectator_access realm setting is enforced.
        do_set_realm_property(
            user_profile.realm, "enable_spectator_access", False, acting_user=None
        )
        result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
        self.assert_json_error(
            result, "Not logged in: API authentication or user session required", 401
        )
        do_set_realm_property(user_profile.realm, "enable_spectator_access", True, acting_user=None)

        # Verify success with web-public stream and default SELF_HOSTED plan type.
        result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
        response_dict = self.assert_json_success(result)
        self.assertEqual(response_dict["raw_content"], "web-public message")
        self.assertEqual(response_dict["message"]["flags"], ["read"])

        # Verify LIMITED plan type does not allow web-public access.
        do_change_realm_plan_type(user_profile.realm, Realm.PLAN_TYPE_LIMITED, acting_user=None)
        result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
        self.assert_json_error(
            result, "Not logged in: API authentication or user session required", 401
        )

        do_set_realm_property(user_profile.realm, "enable_spectator_access", True, acting_user=None)
        result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
        self.assert_json_error(
            result, "Not logged in: API authentication or user session required", 401
        )

        # Verify works with STANDARD_FREE plan type too.
        do_change_realm_plan_type(
            user_profile.realm, Realm.PLAN_TYPE_STANDARD_FREE, acting_user=None
        )
        result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
        response_dict = self.assert_json_success(result)
        self.assertEqual(response_dict["raw_content"], "web-public message")

        # Verify direct messages are rejected.
        result = self.client_get("/json/messages/" + str(private_message_id))
        self.assert_json_error(
            result, "Not logged in: API authentication or user session required", 401
        )

        # Verify an actual public stream is required.
        result = self.client_get("/json/messages/" + str(non_web_public_stream_msg_id))
        self.assert_json_error(
            result, "Not logged in: API authentication or user session required", 401
        )

        # Verify invalid message IDs are rejected with the same error message.
        result = self.client_get("/json/messages/" + str(invalid_message_id))
        self.assert_json_error(
            result, "Not logged in: API authentication or user session required", 401
        )

        # Verify deactivated streams are rejected. This may change in the future.
        do_deactivate_stream(web_public_stream, acting_user=None)
        result = self.client_get("/json/messages/" + str(web_public_stream_msg_id))
        self.assert_json_error(
            result, "Not logged in: API authentication or user session required", 401
        )

    def test_fetch_raw_message_stream_wrong_realm(self) -> None:
        user_profile = self.example_user("hamlet")
        self.login_user(user_profile)
        stream = self.make_stream("public_stream")
        self.subscribe(user_profile, stream.name)
        msg_id = self.send_stream_message(
            user_profile, stream.name, topic_name="test", content="test"
        )
        result = self.client_get(f"/json/messages/{msg_id}")
        self.assert_json_success(result)

        mit_user = self.mit_user("sipbtest")
        self.login_user(mit_user)
        result = self.client_get(f"/json/messages/{msg_id}", subdomain="zephyr")
        self.assert_json_error(result, "Invalid message(s)")

    def test_fetch_raw_message_private_stream(self) -> None:
        user_profile = self.example_user("hamlet")
        self.login_user(user_profile)
        stream = self.make_stream("private_stream", invite_only=True)
        self.subscribe(user_profile, stream.name)
        msg_id = self.send_stream_message(
            user_profile, stream.name, topic_name="test", content="test"
        )
        result = self.client_get(f"/json/messages/{msg_id}")
        self.assert_json_success(result)
        self.login("othello")
        result = self.client_get(f"/json/messages/{msg_id}")
        self.assert_json_error(result, "Invalid message(s)")

    def test_edit_message_no_permission(self) -> None:
        self.login("hamlet")
        msg_id = self.send_stream_message(
            self.example_user("iago"), "Denmark", topic_name="editing", content="before edit"
        )
        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "content": "content after edit",
            },
        )
        self.assert_json_error(result, "You don't have permission to edit this message")

        self.login("iago")
        realm = get_realm("zulip")
        do_set_realm_property(realm, "allow_message_editing", False, acting_user=None)
        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "content": "content after edit",
            },
        )
        self.assert_json_error(result, "Your organization has turned off message editing")

    def test_edit_message_no_content(self) -> None:
        self.login("hamlet")
        msg_id = self.send_stream_message(
            self.example_user("hamlet"), "Denmark", topic_name="editing", content="before edit"
        )
        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "content": " ",
            },
        )
        self.assert_json_success(result)
        content = Message.objects.filter(id=msg_id).values_list("content", flat=True)[0]
        self.assertEqual(content, "(deleted)")

    def test_edit_message_history_disabled(self) -> None:
        user_profile = self.example_user("hamlet")
        do_set_realm_property(user_profile.realm, "allow_edit_history", False, acting_user=None)
        self.login("hamlet")

        # Single-line edit
        msg_id_1 = self.send_stream_message(
            self.example_user("hamlet"),
            "Denmark",
            topic_name="editing",
            content="content before edit",
        )

        new_content_1 = "content after edit"
        result_1 = self.client_patch(
            f"/json/messages/{msg_id_1}",
            {
                "content": new_content_1,
            },
        )
        self.assert_json_success(result_1)

        result = self.client_get(f"/json/messages/{msg_id_1}/history")
        self.assert_json_error(result, "Message edit history is disabled in this organization")

        # Now verify that if we fetch the message directly, there's no
        # edit history data attached.
        messages_result = self.client_get(
            "/json/messages", {"anchor": msg_id_1, "num_before": 0, "num_after": 10}
        )
        self.assert_json_success(messages_result)
        json_messages = orjson.loads(messages_result.content)
        for msg in json_messages["messages"]:
            self.assertNotIn("edit_history", msg)

    def test_edit_message_history(self) -> None:
        self.login("hamlet")

        # Single-line edit
        msg_id_1 = self.send_stream_message(
            self.example_user("hamlet"),
            "Denmark",
            topic_name="editing",
            content="content before edit",
        )
        new_content_1 = "content after edit"
        result_1 = self.client_patch(
            f"/json/messages/{msg_id_1}",
            {
                "content": new_content_1,
            },
        )
        self.assert_json_success(result_1)

        message_edit_history_1 = self.client_get(f"/json/messages/{msg_id_1}/history")
        json_response_1 = orjson.loads(message_edit_history_1.content)
        message_history_1 = json_response_1["message_history"]

        # Check content of message after edit.
        self.assertEqual(message_history_1[0]["rendered_content"], "<p>content before edit</p>")
        self.assertEqual(message_history_1[1]["rendered_content"], "<p>content after edit</p>")
        self.assertEqual(
            message_history_1[1]["content_html_diff"],
            (
                "<div><p>content "
                '<span class="highlight_text_inserted">after</span> '
                '<span class="highlight_text_deleted">before</span>'
                " edit</p></div>"
            ),
        )
        # Check content of message before edit.
        self.assertEqual(
            message_history_1[1]["prev_rendered_content"], "<p>content before edit</p>"
        )

        # Edits on new lines
        msg_id_2 = self.send_stream_message(
            self.example_user("hamlet"),
            "Denmark",
            topic_name="editing",
            content="content before edit, line 1\n\ncontent before edit, line 3",
        )
        new_content_2 = (
            "content before edit, line 1\n"
            "content after edit, line 2\n"
            "content before edit, line 3"
        )
        result_2 = self.client_patch(
            f"/json/messages/{msg_id_2}",
            {
                "content": new_content_2,
            },
        )
        self.assert_json_success(result_2)

        message_edit_history_2 = self.client_get(f"/json/messages/{msg_id_2}/history")
        json_response_2 = orjson.loads(message_edit_history_2.content)
        message_history_2 = json_response_2["message_history"]

        self.assertEqual(
            message_history_2[0]["rendered_content"],
            "<p>content before edit, line 1</p>\n<p>content before edit, line 3</p>",
        )
        self.assertEqual(
            message_history_2[1]["rendered_content"],
            (
                "<p>content before edit, line 1<br>\n"
                "content after edit, line 2<br>\n"
                "content before edit, line 3</p>"
            ),
        )
        self.assertEqual(
            message_history_2[1]["content_html_diff"],
            (
                "<div><p>content before edit, line 1<br> "
                'content <span class="highlight_text_inserted">after edit, line 2<br> '
                "content</span> before edit, line 3</p></div>"
            ),
        )
        self.assertEqual(
            message_history_2[1]["prev_rendered_content"],
            "<p>content before edit, line 1</p>\n<p>content before edit, line 3</p>",
        )
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-10-01 00:14:28 +02:00
|
|
|
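    # An edit on a message whose rendered_content has been emptied (simulating a
    # rendering bug) should still succeed, and the history endpoint should return
    # empty rendered content and an empty diff rather than erroring.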
    def test_empty_message_edit(self) -> None:
        self.login("hamlet")
        msg_id = self.send_stream_message(
            self.example_user("hamlet"),
            "Denmark",
            topic_name="editing",
            content="We will edit this to render as empty.",
        )
        # Edit that manually to simulate a rendering bug
        message = Message.objects.get(id=msg_id)
        message.rendered_content = ""
        message.save(update_fields=["rendered_content"])

        self.assert_json_success(
            self.client_patch(
                "/json/messages/" + str(msg_id),
                {
                    "content": "We will edit this to also render as empty.",
                },
            )
        )
        # And again tweak to simulate a rendering bug
        message = Message.objects.get(id=msg_id)
        message.rendered_content = ""
        message.save(update_fields=["rendered_content"])

        history = self.client_get("/json/messages/" + str(msg_id) + "/history")
        message_history = orjson.loads(history.content)["message_history"]
        self.assertEqual(message_history[0]["rendered_content"], "")
        self.assertEqual(message_history[1]["rendered_content"], "")
        self.assertEqual(message_history[1]["content_html_diff"], "<div></div>")

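    # When an edit changes a link target, both the stored rendered_content and
    # the content_html_diff returned by the history endpoint should reflect the
    # old and new URLs.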
    def test_edit_link(self) -> None:
        # Link editing
        self.login("hamlet")
        msg_id_1 = self.send_stream_message(
            self.example_user("hamlet"),
            "Denmark",
            topic_name="editing",
            content="Here is a link to [zulip](www.zulip.org).",
        )
        new_content_1 = "Here is a link to [zulip](www.zulipchat.com)."
        result_1 = self.client_patch(
            f"/json/messages/{msg_id_1}",
            {
                "content": new_content_1,
            },
        )
        self.assert_json_success(result_1)

        message_edit_history_1 = self.client_get(f"/json/messages/{msg_id_1}/history")
        json_response_1 = orjson.loads(message_edit_history_1.content)
        message_history_1 = json_response_1["message_history"]

        # Check content of message after edit.
        self.assertEqual(
            message_history_1[0]["rendered_content"],
            '<p>Here is a link to <a href="http://www.zulip.org">zulip</a>.</p>',
        )
        self.assertEqual(
            message_history_1[1]["rendered_content"],
            '<p>Here is a link to <a href="http://www.zulipchat.com">zulip</a>.</p>',
        )
        self.assertEqual(
            message_history_1[1]["content_html_diff"],
            (
                '<div><p>Here is a link to <a href="http://www.zulipchat.com"'
                ">zulip "
                '<span class="highlight_text_inserted"> Link: http://www.zulipchat.com .'
                '</span> <span class="highlight_text_deleted"> Link: http://www.zulip.org .'
                "</span> </a></p></div>"
            ),
        )

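    # A message that has never been edited should still expose exactly one
    # history entry (its original state) via the history endpoint.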
    def test_edit_history_unedited(self) -> None:
        self.login("hamlet")

        msg_id = self.send_stream_message(
            self.example_user("hamlet"),
            "Denmark",
            topic_name="editing",
            content="This message has not been edited.",
        )

        result = self.client_get(f"/json/messages/{msg_id}/history")

        message_history = self.assert_json_success(result)["message_history"]
        self.assert_length(message_history, 1)

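    # get_mentions_for_message_updates should return the IDs of the users
    # mentioned in a message, for use when that message is updated.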
    def test_mentions_for_message_updates(self) -> None:
        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")

        self.login_user(hamlet)
        self.subscribe(hamlet, "Denmark")
        self.subscribe(cordelia, "Denmark")

        msg_id = self.send_stream_message(
            hamlet, "Denmark", content="@**Cordelia, Lear's daughter**"
        )

        mention_user_ids = get_mentions_for_message_updates(msg_id)
        self.assertEqual(mention_user_ids, {cordelia.id})

    def test_edit_cases(self) -> None:
        """This test verifies the accuracy of construction of Zulip's edit
        history data structures."""
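        # As exercised below, each edit_history entry records "timestamp" and
        # "user_id", plus "prev_content"/"prev_rendered_content"/
        # "prev_rendered_content_version" for content edits, "topic"/"prev_topic"
        # for topic edits, and "stream"/"prev_stream" for stream moves.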
        self.login("hamlet")
        hamlet = self.example_user("hamlet")
        stream_1 = self.make_stream("stream 1")
        stream_2 = self.make_stream("stream 2")
        stream_3 = self.make_stream("stream 3")
        self.subscribe(hamlet, stream_1.name)
        self.subscribe(hamlet, stream_2.name)
        self.subscribe(hamlet, stream_3.name)
        msg_id = self.send_stream_message(
            self.example_user("hamlet"), "stream 1", topic_name="topic 1", content="content 1"
        )

        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "content": "content 2",
            },
        )
        self.assert_json_success(result)
        history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))
        self.assertEqual(history[0]["prev_content"], "content 1")
        self.assertEqual(history[0]["user_id"], hamlet.id)
        self.assertEqual(
            set(history[0].keys()),
            {
                "timestamp",
                "prev_content",
                "user_id",
                "prev_rendered_content",
                "prev_rendered_content_version",
            },
        )

        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "topic": "topic 2",
            },
        )
        self.assert_json_success(result)
        history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))
        self.assertEqual(history[0]["prev_topic"], "topic 1")
        self.assertEqual(history[0]["topic"], "topic 2")
        self.assertEqual(history[0]["user_id"], hamlet.id)
        self.assertEqual(
            set(history[0].keys()),
            {"timestamp", "prev_topic", "topic", "user_id"},
        )

        self.login("iago")
        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "stream_id": stream_2.id,
            },
        )
        self.assert_json_success(result)
        history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))
        self.assertEqual(history[0]["prev_stream"], stream_1.id)
        self.assertEqual(history[0]["stream"], stream_2.id)
        self.assertEqual(history[0]["user_id"], self.example_user("iago").id)
        self.assertEqual(set(history[0].keys()), {"timestamp", "prev_stream", "stream", "user_id"})

        self.login("hamlet")
        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "content": "content 3",
                "topic": "topic 3",
            },
        )
        self.assert_json_success(result)
        history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))
        self.assertEqual(history[0]["prev_content"], "content 2")
        self.assertEqual(history[0]["prev_topic"], "topic 2")
        self.assertEqual(history[0]["topic"], "topic 3")
        self.assertEqual(history[0]["user_id"], hamlet.id)
        self.assertEqual(
            set(history[0].keys()),
            {
                "timestamp",
                "prev_topic",
                "topic",
                "prev_content",
                "user_id",
                "prev_rendered_content",
                "prev_rendered_content_version",
            },
        )

        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "content": "content 4",
            },
        )
        self.assert_json_success(result)
        history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))
        self.assertEqual(history[0]["prev_content"], "content 3")
        self.assertEqual(history[0]["user_id"], hamlet.id)

        self.login("iago")
        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "topic": "topic 4",
                "stream_id": stream_3.id,
            },
        )
        self.assert_json_success(result)
        history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))
        self.assertEqual(history[0]["prev_topic"], "topic 3")
        self.assertEqual(history[0]["topic"], "topic 4")
        self.assertEqual(history[0]["prev_stream"], stream_2.id)
        self.assertEqual(history[0]["stream"], stream_3.id)
        self.assertEqual(history[0]["user_id"], self.example_user("iago").id)
        self.assertEqual(
            set(history[0].keys()),
            {
                "timestamp",
                "prev_topic",
                "topic",
                "prev_stream",
                "stream",
                "user_id",
            },
        )

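        # Note: message.edit_history stores the most recent edit first, so
        # history[0] above always reflects the latest change.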
        # Now, we verify that all of the edits stored in the message.edit_history
        # have the correct data structure
        history = orjson.loads(assert_is_not_none(Message.objects.get(id=msg_id).edit_history))

        self.assertEqual(history[0]["prev_topic"], "topic 3")
        self.assertEqual(history[0]["topic"], "topic 4")
        self.assertEqual(history[0]["stream"], stream_3.id)
        self.assertEqual(history[0]["prev_stream"], stream_2.id)

        self.assertEqual(history[1]["prev_content"], "content 3")

        self.assertEqual(history[2]["prev_topic"], "topic 2")
        self.assertEqual(history[2]["topic"], "topic 3")
        self.assertEqual(history[2]["prev_content"], "content 2")

        self.assertEqual(history[3]["stream"], stream_2.id)
        self.assertEqual(history[3]["prev_stream"], stream_1.id)

        self.assertEqual(history[4]["prev_topic"], "topic 1")
        self.assertEqual(history[4]["topic"], "topic 2")

        self.assertEqual(history[5]["prev_content"], "content 1")

        # Now, we verify that the edit history data sent back has the
        # correct filled-out fields
        message_edit_history = self.client_get(f"/json/messages/{msg_id}/history")

        json_response = orjson.loads(message_edit_history.content)

        # We reverse the message history view output so that the IDs line up with the above.
        message_history = list(reversed(json_response["message_history"]))
        i = 0
        for entry in message_history:
            expected_entries = {"content", "rendered_content", "topic", "timestamp", "user_id"}
            if i in {0, 2, 4}:
                expected_entries.add("prev_topic")
                expected_entries.add("topic")
            if i in {1, 2, 5}:
                expected_entries.add("prev_content")
                expected_entries.add("prev_rendered_content")
                expected_entries.add("content_html_diff")
            if i in {0, 3}:
                expected_entries.add("prev_stream")
                expected_entries.add("stream")
            i += 1
            self.assertEqual(expected_entries, set(entry.keys()))
        self.assert_length(message_history, 7)
        self.assertEqual(message_history[0]["topic"], "topic 4")
        self.assertEqual(message_history[0]["prev_topic"], "topic 3")
        self.assertEqual(message_history[0]["stream"], stream_3.id)
        self.assertEqual(message_history[0]["prev_stream"], stream_2.id)
        self.assertEqual(message_history[0]["content"], "content 4")

        self.assertEqual(message_history[1]["topic"], "topic 3")
        self.assertEqual(message_history[1]["content"], "content 4")
        self.assertEqual(message_history[1]["prev_content"], "content 3")

        self.assertEqual(message_history[2]["topic"], "topic 3")
        self.assertEqual(message_history[2]["prev_topic"], "topic 2")
        self.assertEqual(message_history[2]["content"], "content 3")
        self.assertEqual(message_history[2]["prev_content"], "content 2")

        self.assertEqual(message_history[3]["topic"], "topic 2")
        self.assertEqual(message_history[3]["stream"], stream_2.id)
        self.assertEqual(message_history[3]["prev_stream"], stream_1.id)
        self.assertEqual(message_history[3]["content"], "content 2")

        self.assertEqual(message_history[4]["topic"], "topic 2")
        self.assertEqual(message_history[4]["prev_topic"], "topic 1")
        self.assertEqual(message_history[4]["content"], "content 2")

        self.assertEqual(message_history[5]["topic"], "topic 1")
        self.assertEqual(message_history[5]["content"], "content 2")
        self.assertEqual(message_history[5]["prev_content"], "content 1")

        self.assertEqual(message_history[6]["content"], "content 1")
        self.assertEqual(message_history[6]["topic"], "topic 1")

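    # message_content_edit_limit_seconds only restricts content edits; topic-only
    # edits remain possible after the limit expires, and "unlimited" disables the
    # limit entirely.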
    def test_edit_message_content_limit(self) -> None:
        def set_message_editing_params(
            allow_message_editing: bool,
            message_content_edit_limit_seconds: Union[int, str],
            edit_topic_policy: int,
        ) -> None:
            result = self.client_patch(
                "/json/realm",
                {
                    "allow_message_editing": orjson.dumps(allow_message_editing).decode(),
                    "message_content_edit_limit_seconds": orjson.dumps(
                        message_content_edit_limit_seconds
                    ).decode(),
                    "edit_topic_policy": edit_topic_policy,
                },
            )
            self.assert_json_success(result)

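        # Helpers: attempt an edit (topic-only or topic+content) and assert that
        # it either succeeds and is persisted, or fails with the given error and
        # leaves the message unchanged.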
        def do_edit_message_assert_success(
            id_: int, unique_str: str, topic_only: bool = False
        ) -> None:
            new_topic = "topic" + unique_str
            new_content = "content" + unique_str
            params_dict = {"topic": new_topic}
            if not topic_only:
                params_dict["content"] = new_content
            result = self.client_patch(f"/json/messages/{id_}", params_dict)
            self.assert_json_success(result)
            if topic_only:
                self.check_topic(id_, topic_name=new_topic)
            else:
                self.check_message(id_, topic_name=new_topic, content=new_content)

        def do_edit_message_assert_error(
            id_: int, unique_str: str, error: str, topic_only: bool = False
        ) -> None:
            message = Message.objects.get(id=id_)
            old_topic = message.topic_name()
            old_content = message.content
            new_topic = "topic" + unique_str
            new_content = "content" + unique_str
            params_dict = {"topic": new_topic}
            if not topic_only:
                params_dict["content"] = new_content
            result = self.client_patch(f"/json/messages/{id_}", params_dict)
            message = Message.objects.get(id=id_)
            self.assert_json_error(result, error)

            msg = Message.objects.get(id=id_)
            self.assertEqual(msg.topic_name(), old_topic)
            self.assertEqual(msg.content, old_content)

        self.login("iago")
        # send a message in the past
        id_ = self.send_stream_message(
            self.example_user("iago"), "Denmark", content="content", topic_name="topic"
        )
        message = Message.objects.get(id=id_)
        message.date_sent = message.date_sent - datetime.timedelta(seconds=180)
        message.save()

        # test the various possible message editing settings
        # high enough time limit, all edits allowed
        set_message_editing_params(True, 240, Realm.POLICY_ADMINS_ONLY)
        do_edit_message_assert_success(id_, "A")

        # out of time, only topic editing allowed
        set_message_editing_params(True, 120, Realm.POLICY_ADMINS_ONLY)
        do_edit_message_assert_success(id_, "B", True)
        do_edit_message_assert_error(id_, "C", "The time limit for editing this message has passed")

        # infinite time, all edits allowed
        set_message_editing_params(True, "unlimited", Realm.POLICY_ADMINS_ONLY)
        do_edit_message_assert_success(id_, "D")

        # without allow_message_editing, editing content is not allowed but
        # editing topic is allowed if topic-edit time limit has not passed
        # irrespective of content-edit time limit.
        set_message_editing_params(False, 240, Realm.POLICY_ADMINS_ONLY)
        do_edit_message_assert_success(id_, "B", True)

        set_message_editing_params(False, 240, Realm.POLICY_ADMINS_ONLY)
        do_edit_message_assert_success(id_, "E", True)
        set_message_editing_params(False, 120, Realm.POLICY_ADMINS_ONLY)
        do_edit_message_assert_success(id_, "F", True)
        set_message_editing_params(False, "unlimited", Realm.POLICY_ADMINS_ONLY)
        do_edit_message_assert_success(id_, "G", True)

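    # edit_topic_policy controls who may edit a message's topic, independent of
    # who sent the message; the cases below walk through each policy value.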
    def test_edit_topic_policy(self) -> None:
        def set_message_editing_params(
            allow_message_editing: bool,
            message_content_edit_limit_seconds: Union[int, str],
            edit_topic_policy: int,
        ) -> None:
            self.login("iago")
            result = self.client_patch(
                "/json/realm",
                {
                    "allow_message_editing": orjson.dumps(allow_message_editing).decode(),
                    "message_content_edit_limit_seconds": orjson.dumps(
                        message_content_edit_limit_seconds
                    ).decode(),
                    "edit_topic_policy": edit_topic_policy,
                },
            )
            self.assert_json_success(result)

        def do_edit_message_assert_success(id_: int, unique_str: str, acting_user: str) -> None:
            self.login(acting_user)
            new_topic = "topic" + unique_str
            params_dict = {"topic": new_topic}
            result = self.client_patch(f"/json/messages/{id_}", params_dict)
            self.assert_json_success(result)
            self.check_topic(id_, topic_name=new_topic)

        def do_edit_message_assert_error(
            id_: int, unique_str: str, error: str, acting_user: str
        ) -> None:
            self.login(acting_user)
            message = Message.objects.get(id=id_)
            old_topic = message.topic_name()
            old_content = message.content
            new_topic = "topic" + unique_str
            params_dict = {"topic": new_topic}
            result = self.client_patch(f"/json/messages/{id_}", params_dict)
            message = Message.objects.get(id=id_)
            self.assert_json_error(result, error)
            msg = Message.objects.get(id=id_)
            self.assertEqual(msg.topic_name(), old_topic)
            self.assertEqual(msg.content, old_content)

        # send a message in the past
        id_ = self.send_stream_message(
            self.example_user("hamlet"), "Denmark", content="content", topic_name="topic"
        )
        message = Message.objects.get(id=id_)
        message.date_sent = message.date_sent - datetime.timedelta(seconds=180)
        message.save()

        # Guest user must be subscribed to the stream to access the message.
        polonius = self.example_user("polonius")
        self.subscribe(polonius, "Denmark")

        # any user can edit the topic of a message
        set_message_editing_params(True, "unlimited", Realm.POLICY_EVERYONE)
        do_edit_message_assert_success(id_, "A", "polonius")

        # only members can edit topic of a message
        set_message_editing_params(True, "unlimited", Realm.POLICY_MEMBERS_ONLY)
        do_edit_message_assert_error(
            id_, "B", "You don't have permission to edit this message", "polonius"
        )
        do_edit_message_assert_success(id_, "B", "cordelia")

        # only full members can edit topic of a message
        set_message_editing_params(True, "unlimited", Realm.POLICY_FULL_MEMBERS_ONLY)

        cordelia = self.example_user("cordelia")
        hamlet = self.example_user("hamlet")
        do_set_realm_property(cordelia.realm, "waiting_period_threshold", 10, acting_user=None)

        cordelia.date_joined = timezone_now() - datetime.timedelta(days=9)
        cordelia.save()
        hamlet.date_joined = timezone_now() - datetime.timedelta(days=9)
        hamlet.save()
        do_edit_message_assert_error(
            id_, "C", "You don't have permission to edit this message", "cordelia"
        )
        # The user who sent the message but is not yet a full member cannot edit
        # the topic either.
        do_edit_message_assert_error(
            id_, "C", "You don't have permission to edit this message", "hamlet"
        )

        cordelia.date_joined = timezone_now() - datetime.timedelta(days=11)
        cordelia.save()
        hamlet.date_joined = timezone_now() - datetime.timedelta(days=11)
        hamlet.save()
        do_edit_message_assert_success(id_, "C", "cordelia")
        do_edit_message_assert_success(id_, "CD", "hamlet")

        # only moderators can edit topic of a message
        set_message_editing_params(True, "unlimited", Realm.POLICY_MODERATORS_ONLY)
        do_edit_message_assert_error(
            id_, "D", "You don't have permission to edit this message", "cordelia"
        )
        # Even the user who sent the message cannot edit the topic if they are not a moderator.
        do_edit_message_assert_error(
            id_, "D", "You don't have permission to edit this message", "hamlet"
        )
        do_edit_message_assert_success(id_, "D", "shiva")

        # only admins can edit the topics of messages
        set_message_editing_params(True, "unlimited", Realm.POLICY_ADMINS_ONLY)
        do_edit_message_assert_error(
            id_, "E", "You don't have permission to edit this message", "shiva"
        )
        do_edit_message_assert_success(id_, "E", "iago")

        # with POLICY_NOBODY, even owners and admins cannot edit the topics of messages
        set_message_editing_params(True, "unlimited", Realm.POLICY_NOBODY)
        do_edit_message_assert_error(
            id_, "H", "You don't have permission to edit this message", "desdemona"
        )
        do_edit_message_assert_error(
            id_, "H", "You don't have permission to edit this message", "iago"
        )

        # users can edit topics even if allow_message_editing is False
        set_message_editing_params(False, "unlimited", Realm.POLICY_EVERYONE)
        do_edit_message_assert_success(id_, "D", "cordelia")

        # Non-admin users, including the sender, cannot edit the topic of a
        # message sent more than one week ago.
        message.date_sent = message.date_sent - datetime.timedelta(seconds=604900)
        message.save()
        set_message_editing_params(True, "unlimited", Realm.POLICY_EVERYONE)
        do_edit_message_assert_success(id_, "E", "iago")
        do_edit_message_assert_success(id_, "F", "shiva")
        do_edit_message_assert_error(
            id_, "G", "The time limit for editing this message's topic has passed.", "cordelia"
        )
        do_edit_message_assert_error(
            id_, "G", "The time limit for editing this message's topic has passed.", "hamlet"
        )

        # topic edit permissions apply on "no topic" messages as well
        message.set_topic_name("(no topic)")
        message.save()
        do_edit_message_assert_error(
            id_, "G", "The time limit for editing this message's topic has passed.", "cordelia"
        )

        # set the topic edit limit to two weeks
        do_set_realm_property(
            hamlet.realm,
            "move_messages_within_stream_limit_seconds",
            604800 * 2,
            acting_user=None,
        )
        do_edit_message_assert_success(id_, "G", "cordelia")
        do_edit_message_assert_success(id_, "H", "hamlet")

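    # The mocked send_event records which users are notified when a message's
    # topic is edited; as checked below, this depends on stream subscriptions,
    # UserMessage rows, and whether a user is long-term idle.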
@mock.patch("zerver.actions.message_edit.send_event")
|
2020-07-03 11:09:37 +02:00
|
|
|
def test_edit_topic_public_history_stream(self, mock_send_event: mock.MagicMock) -> None:
|
|
|
|
stream_name = "Macbeth"
|
|
|
|
hamlet = self.example_user("hamlet")
|
|
|
|
cordelia = self.example_user("cordelia")
|
|
|
|
self.make_stream(stream_name, history_public_to_subscribers=True)
|
|
|
|
self.subscribe(hamlet, stream_name)
|
|
|
|
self.login_user(hamlet)
|
|
|
|
message_id = self.send_stream_message(hamlet, stream_name, "Where am I?")
|
|
|
|
|
|
|
|
self.login_user(cordelia)
|
|
|
|
self.subscribe(cordelia, stream_name)
|
|
|
|
message = Message.objects.get(id=message_id)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def do_update_message_topic_success(
|
|
|
|
user_profile: UserProfile,
|
|
|
|
message: Message,
|
|
|
|
topic_name: str,
|
|
|
|
users_to_be_notified: List[Dict[str, Any]],
|
|
|
|
) -> None:
|
2020-07-03 11:09:37 +02:00
|
|
|
do_update_message(
|
|
|
|
user_profile=user_profile,
|
2021-05-10 06:10:32 +02:00
|
|
|
target_message=message,
|
2020-07-03 11:09:37 +02:00
|
|
|
new_stream=None,
|
|
|
|
topic_name=topic_name,
|
|
|
|
propagate_mode="change_later",
|
|
|
|
send_notification_to_old_thread=False,
|
|
|
|
send_notification_to_new_thread=False,
|
|
|
|
content=None,
|
2021-06-17 12:20:40 +02:00
|
|
|
rendering_result=None,
|
2020-07-03 11:09:37 +02:00
|
|
|
prior_mention_user_ids=set(),
|
|
|
|
mention_data=None,
|
|
|
|
)
|
|
|
|
|
|
|
|
mock_send_event.assert_called_with(mock.ANY, mock.ANY, users_to_be_notified)
|
|
|
|
|
|
|
|
# Returns the users that need to be notified when a message topic is changed
|
|
|
|
def notify(user_id: int) -> Dict[str, Any]:
|
|
|
|
um = UserMessage.objects.get(message=message_id)
|
|
|
|
if um.user_profile_id == user_id:
|
|
|
|
return {
|
|
|
|
"id": user_id,
|
|
|
|
"flags": um.flags_list(),
|
|
|
|
}
|
|
|
|
|
|
|
|
else:
|
|
|
|
return {
|
|
|
|
"id": user_id,
|
|
|
|
"flags": ["read"],
|
|
|
|
}
|
|
|
|
|
|
|
|
users_to_be_notified = list(map(notify, [hamlet.id, cordelia.id]))
|
|
|
|
# Edit topic of a message sent before Cordelia subscribed the stream
|
2021-02-12 08:19:30 +01:00
|
|
|
do_update_message_topic_success(
|
|
|
|
cordelia, message, "Othello eats apple", users_to_be_notified
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
# If Cordelia is long-term idle, she doesn't get a notification.
|
|
|
|
cordelia.long_term_idle = True
|
|
|
|
cordelia.save()
|
|
|
|
users_to_be_notified = list(map(notify, [hamlet.id]))
|
2021-02-12 08:19:30 +01:00
|
|
|
do_update_message_topic_success(
|
|
|
|
cordelia, message, "Another topic idle", users_to_be_notified
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
cordelia.long_term_idle = False
|
|
|
|
cordelia.save()
|
|
|
|
|
|
|
|
# Even if Hamlet unsubscribes the stream, he should be notified when the topic is changed
|
|
|
|
# because he has a UserMessage row.
|
|
|
|
self.unsubscribe(hamlet, stream_name)
|
|
|
|
users_to_be_notified = list(map(notify, [hamlet.id, cordelia.id]))
|
|
|
|
do_update_message_topic_success(cordelia, message, "Another topic", users_to_be_notified)
|
|
|
|
|
|
|
|
# Hamlet subscribes to the stream again and Cordelia unsubscribes, then Hamlet changes
|
|
|
|
# the message topic. Cordelia won't receive any updates when a message on that stream is
|
|
|
|
# changed because she is not a subscriber and doesn't have a UserMessage row.
|
|
|
|
self.subscribe(hamlet, stream_name)
|
|
|
|
self.unsubscribe(cordelia, stream_name)
|
|
|
|
self.login_user(hamlet)
|
|
|
|
users_to_be_notified = list(map(notify, [hamlet.id]))
|
|
|
|
do_update_message_topic_success(hamlet, message, "Change again", users_to_be_notified)
|
|
|
|
|
2023-03-28 16:47:07 +02:00
|
|
|
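    # When a topic is renamed or moved, muted-topic (UserTopic) records should
    # follow the topic for the affected users, and 'user_topic'/'muted_topics'
    # events should go to exactly the users holding such records.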
@mock.patch("zerver.actions.user_topics.send_event")
|
2022-03-18 01:19:16 +01:00
|
|
|
def test_edit_muted_topic(self, mock_send_event: mock.MagicMock) -> None:
|
|
|
|
stream_name = "Stream 123"
|
|
|
|
stream = self.make_stream(stream_name)
|
|
|
|
hamlet = self.example_user("hamlet")
|
|
|
|
cordelia = self.example_user("cordelia")
|
|
|
|
aaron = self.example_user("aaron")
|
|
|
|
self.subscribe(hamlet, stream_name)
|
|
|
|
self.login_user(hamlet)
|
|
|
|
message_id = self.send_stream_message(
|
|
|
|
hamlet, stream_name, topic_name="Topic1", content="Hello World"
|
|
|
|
)
|
|
|
|
|
|
|
|
self.subscribe(cordelia, stream_name)
|
|
|
|
self.login_user(cordelia)
|
|
|
|
self.subscribe(aaron, stream_name)
|
|
|
|
self.login_user(aaron)
|
|
|
|
|
2023-03-28 20:09:25 +02:00
|
|
|
def assert_is_topic_muted(
|
|
|
|
user_profile: UserProfile,
|
|
|
|
stream_id: int,
|
|
|
|
topic_name: str,
|
|
|
|
*,
|
|
|
|
muted: bool,
|
|
|
|
) -> None:
|
|
|
|
if muted:
|
|
|
|
self.assertTrue(
|
|
|
|
topic_has_visibility_policy(
|
|
|
|
user_profile, stream_id, topic_name, UserTopic.VisibilityPolicy.MUTED
|
|
|
|
)
|
|
|
|
)
|
|
|
|
else:
|
|
|
|
self.assertFalse(
|
|
|
|
topic_has_visibility_policy(
|
|
|
|
user_profile, stream_id, topic_name, UserTopic.VisibilityPolicy.MUTED
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
2022-03-25 00:57:32 +01:00
|
|
|
already_muted_topic = "Already muted topic"
|
2022-03-18 01:19:16 +01:00
|
|
|
muted_topics = [
|
|
|
|
[stream_name, "Topic1"],
|
|
|
|
[stream_name, "Topic2"],
|
2022-03-25 00:57:32 +01:00
|
|
|
[stream_name, already_muted_topic],
|
2022-03-18 01:19:16 +01:00
|
|
|
]
|
2023-03-26 15:36:01 +02:00
|
|
|
set_topic_visibility_policy(hamlet, muted_topics, UserTopic.VisibilityPolicy.MUTED)
|
|
|
|
set_topic_visibility_policy(cordelia, muted_topics, UserTopic.VisibilityPolicy.MUTED)
|
2022-03-18 01:19:16 +01:00
|
|
|
|
2023-03-28 16:47:07 +02:00
|
|
|
# users that need to be notified by send_event in the case of change-topic-name operation.
|
|
|
|
users_to_be_notified_via_muted_topics_event: List[int] = []
|
|
|
|
users_to_be_notified_via_user_topic_event: List[int] = []
|
|
|
|
for user_topic in get_users_with_user_topic_visibility_policy(stream.id, "Topic1"):
|
|
|
|
# We are appending the same data twice because 'user_topic' event notifies
|
|
|
|
# the user during delete and create operation.
|
|
|
|
users_to_be_notified_via_user_topic_event.append(user_topic.user_profile_id)
|
|
|
|
users_to_be_notified_via_user_topic_event.append(user_topic.user_profile_id)
|
|
|
|
# 'muted_topics' event notifies the user of muted topics during create
|
|
|
|
# operation only.
|
|
|
|
users_to_be_notified_via_muted_topics_event.append(user_topic.user_profile_id)
|
2022-03-18 01:19:16 +01:00
|
|
|
|
|
|
|
change_all_topic_name = "Topic 1 edited"
|
2023-04-04 05:30:41 +02:00
|
|
|
# Verify how many total database queries are required. We
|
|
|
|
# expect 6 queries (4/visibility_policy to update the muted
|
|
|
|
# state + 1/user with a UserTopic row for the events data)
|
|
|
|
# beyond what is typical were there not UserTopic records to
|
|
|
|
# update. Ideally, we'd eliminate the per-user component.
|
2023-04-09 20:58:00 +02:00
|
|
|
with self.assert_database_query_count(21):
|
2022-03-18 01:19:16 +01:00
|
|
|
check_update_message(
|
|
|
|
user_profile=hamlet,
|
|
|
|
message_id=message_id,
|
|
|
|
stream_id=None,
|
|
|
|
topic_name=change_all_topic_name,
|
|
|
|
propagate_mode="change_all",
|
|
|
|
send_notification_to_old_thread=False,
|
|
|
|
send_notification_to_new_thread=False,
|
|
|
|
content=None,
|
|
|
|
)
|
|
|
|
|
2023-03-28 16:47:07 +02:00
|
|
|
# Extract the send_event call where event type is 'user_topic' or 'muted_topics.
|
|
|
|
# Here we assert that the expected users are notified properly.
|
|
|
|
users_notified_via_muted_topics_event: List[int] = []
|
|
|
|
users_notified_via_user_topic_event: List[int] = []
|
|
|
|
for call_args in mock_send_event.call_args_list:
|
|
|
|
(arg_realm, arg_event, arg_notified_users) = call_args[0]
|
|
|
|
if arg_event["type"] == "user_topic":
|
|
|
|
users_notified_via_user_topic_event.append(*arg_notified_users)
|
|
|
|
elif arg_event["type"] == "muted_topics":
|
|
|
|
users_notified_via_muted_topics_event.append(*arg_notified_users)
|
|
|
|
self.assertEqual(
|
|
|
|
sorted(users_notified_via_muted_topics_event),
|
|
|
|
sorted(users_to_be_notified_via_muted_topics_event),
|
|
|
|
)
|
|
|
|
self.assertEqual(
|
|
|
|
sorted(users_notified_via_user_topic_event),
|
|
|
|
sorted(users_to_be_notified_via_user_topic_event),
|
|
|
|
)
|
2022-03-18 01:19:16 +01:00
|
|
|
|
2023-03-28 20:09:25 +02:00
|
|
|
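        # After the change_all move, the muted state should have migrated from
        # "Topic1" to the new topic name for Hamlet and Cordelia (who had it
        # muted), while Aaron and the unrelated "Topic2" records are unaffected.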
        assert_is_topic_muted(hamlet, stream.id, "Topic1", muted=False)
        assert_is_topic_muted(cordelia, stream.id, "Topic1", muted=False)
        assert_is_topic_muted(aaron, stream.id, "Topic1", muted=False)
        assert_is_topic_muted(hamlet, stream.id, "Topic2", muted=True)
        assert_is_topic_muted(cordelia, stream.id, "Topic2", muted=True)
        assert_is_topic_muted(aaron, stream.id, "Topic2", muted=False)
        assert_is_topic_muted(hamlet, stream.id, change_all_topic_name, muted=True)
        assert_is_topic_muted(cordelia, stream.id, change_all_topic_name, muted=True)
        assert_is_topic_muted(aaron, stream.id, change_all_topic_name, muted=False)

        change_later_topic_name = "Topic 1 edited again"
        check_update_message(
            user_profile=hamlet,
            message_id=message_id,
            stream_id=None,
            topic_name=change_later_topic_name,
            propagate_mode="change_later",
            send_notification_to_old_thread=False,
            send_notification_to_new_thread=False,
            content=None,
        )
        assert_is_topic_muted(hamlet, stream.id, change_all_topic_name, muted=False)
        assert_is_topic_muted(hamlet, stream.id, change_later_topic_name, muted=True)

        # Make sure we safely handle the case of the new topic being already muted.
        check_update_message(
            user_profile=hamlet,
            message_id=message_id,
            stream_id=None,
            topic_name=already_muted_topic,
            propagate_mode="change_all",
            send_notification_to_old_thread=False,
            send_notification_to_new_thread=False,
            content=None,
        )
        assert_is_topic_muted(hamlet, stream.id, change_later_topic_name, muted=False)
        assert_is_topic_muted(hamlet, stream.id, already_muted_topic, muted=True)

        change_one_topic_name = "Topic 1 edited change_one"
        check_update_message(
            user_profile=hamlet,
            message_id=message_id,
            stream_id=None,
            topic_name=change_one_topic_name,
            propagate_mode="change_one",
            send_notification_to_old_thread=False,
            send_notification_to_new_thread=False,
            content=None,
        )
        assert_is_topic_muted(hamlet, stream.id, change_one_topic_name, muted=True)
        assert_is_topic_muted(hamlet, stream.id, change_later_topic_name, muted=False)

        # Move topic between two public streams.
        desdemona = self.example_user("desdemona")
        message_id = self.send_stream_message(
            hamlet, stream_name, topic_name="New topic", content="Hello World"
        )
        new_public_stream = self.make_stream("New public stream")
        self.subscribe(desdemona, new_public_stream.name)
        self.login_user(desdemona)
        muted_topics = [
            [stream_name, "New topic"],
        ]
        set_topic_visibility_policy(desdemona, muted_topics, UserTopic.VisibilityPolicy.MUTED)
        set_topic_visibility_policy(cordelia, muted_topics, UserTopic.VisibilityPolicy.MUTED)

        with self.assert_database_query_count(30):
            check_update_message(
                user_profile=desdemona,
                message_id=message_id,
                stream_id=new_public_stream.id,
                propagate_mode="change_all",
                send_notification_to_old_thread=False,
                send_notification_to_new_thread=False,
                content=None,
            )

        assert_is_topic_muted(desdemona, stream.id, "New topic", muted=False)
        assert_is_topic_muted(cordelia, stream.id, "New topic", muted=False)
        assert_is_topic_muted(aaron, stream.id, "New topic", muted=False)
        assert_is_topic_muted(desdemona, new_public_stream.id, "New topic", muted=True)
        assert_is_topic_muted(cordelia, new_public_stream.id, "New topic", muted=True)
        assert_is_topic_muted(aaron, new_public_stream.id, "New topic", muted=False)

        # Move topic to a private stream.
        message_id = self.send_stream_message(
            hamlet, stream_name, topic_name="New topic", content="Hello World"
        )
        new_private_stream = self.make_stream("New private stream", invite_only=True)
        self.subscribe(desdemona, new_private_stream.name)
        self.login_user(desdemona)
        muted_topics = [
            [stream_name, "New topic"],
        ]
        set_topic_visibility_policy(desdemona, muted_topics, UserTopic.VisibilityPolicy.MUTED)
        set_topic_visibility_policy(cordelia, muted_topics, UserTopic.VisibilityPolicy.MUTED)

        with self.assert_database_query_count(35):
            check_update_message(
                user_profile=desdemona,
                message_id=message_id,
                stream_id=new_private_stream.id,
                propagate_mode="change_all",
                send_notification_to_old_thread=False,
                send_notification_to_new_thread=False,
                content=None,
            )

        # Cordelia is not subscribed to the private stream, so Cordelia should
        # have had the topic unmuted, while Desdemona should have had her muted
        # topic record moved.
        assert_is_topic_muted(desdemona, stream.id, "New topic", muted=False)
        assert_is_topic_muted(cordelia, stream.id, "New topic", muted=False)
        assert_is_topic_muted(aaron, stream.id, "New topic", muted=False)
        assert_is_topic_muted(desdemona, new_private_stream.id, "New topic", muted=True)
        assert_is_topic_muted(cordelia, new_private_stream.id, "New topic", muted=False)
        assert_is_topic_muted(aaron, new_private_stream.id, "New topic", muted=False)

        # Move topic between two public streams with a change in topic name.
        desdemona = self.example_user("desdemona")
        message_id = self.send_stream_message(
            hamlet, stream_name, topic_name="New topic 2", content="Hello World"
        )
        self.login_user(desdemona)
        muted_topics = [
            [stream_name, "New topic 2"],
        ]
        set_topic_visibility_policy(desdemona, muted_topics, UserTopic.VisibilityPolicy.MUTED)
        set_topic_visibility_policy(cordelia, muted_topics, UserTopic.VisibilityPolicy.MUTED)

        with self.assert_database_query_count(30):
            check_update_message(
                user_profile=desdemona,
                message_id=message_id,
                stream_id=new_public_stream.id,
                topic_name="changed topic name",
                propagate_mode="change_all",
                send_notification_to_old_thread=False,
                send_notification_to_new_thread=False,
                content=None,
            )

        assert_is_topic_muted(desdemona, stream.id, "New topic 2", muted=False)
        assert_is_topic_muted(cordelia, stream.id, "New topic 2", muted=False)
        assert_is_topic_muted(aaron, stream.id, "New topic 2", muted=False)
        assert_is_topic_muted(desdemona, new_public_stream.id, "changed topic name", muted=True)
        assert_is_topic_muted(cordelia, new_public_stream.id, "changed topic name", muted=True)
        assert_is_topic_muted(aaron, new_public_stream.id, "changed topic name", muted=False)

        # Moving only half the messages doesn't move UserTopic records.
        second_message_id = self.send_stream_message(
            hamlet, stream_name, topic_name="changed topic name", content="Second message"
        )
        with self.assert_database_query_count(26):
            check_update_message(
                user_profile=desdemona,
                message_id=second_message_id,
                stream_id=new_public_stream.id,
                topic_name="final topic name",
                propagate_mode="change_later",
                send_notification_to_old_thread=False,
                send_notification_to_new_thread=False,
                content=None,
            )

        assert_is_topic_muted(desdemona, new_public_stream.id, "changed topic name", muted=True)
        assert_is_topic_muted(cordelia, new_public_stream.id, "changed topic name", muted=True)
        assert_is_topic_muted(aaron, new_public_stream.id, "changed topic name", muted=False)
        assert_is_topic_muted(desdemona, new_public_stream.id, "final topic name", muted=False)
        assert_is_topic_muted(cordelia, new_public_stream.id, "final topic name", muted=False)
        assert_is_topic_muted(aaron, new_public_stream.id, "final topic name", muted=False)

@mock.patch("zerver.actions.user_topics.send_event")
|
|
|
|
def test_edit_unmuted_topic(self, mock_send_event: mock.MagicMock) -> None:
|
|
|
|
stream_name = "Stream 123"
|
|
|
|
stream = self.make_stream(stream_name)
|
|
|
|
|
|
|
|
hamlet = self.example_user("hamlet")
|
|
|
|
cordelia = self.example_user("cordelia")
|
|
|
|
aaron = self.example_user("aaron")
|
2023-03-20 19:35:55 +01:00
|
|
|
othello = self.example_user("othello")
|
2023-03-17 15:09:50 +01:00
|
|
|
|
|
|
|
def assert_has_visibility_policy(
|
|
|
|
user_profile: UserProfile,
|
|
|
|
topic_name: str,
|
|
|
|
visibility_policy: int,
|
|
|
|
*,
|
|
|
|
expected: bool,
|
|
|
|
) -> None:
|
|
|
|
if expected:
|
|
|
|
self.assertTrue(
|
|
|
|
topic_has_visibility_policy(
|
|
|
|
user_profile, stream.id, topic_name, visibility_policy
|
|
|
|
)
|
|
|
|
)
|
|
|
|
else:
|
|
|
|
self.assertFalse(
|
|
|
|
topic_has_visibility_policy(
|
|
|
|
user_profile, stream.id, topic_name, visibility_policy
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
|
|
|
self.subscribe(hamlet, stream_name)
|
|
|
|
self.login_user(hamlet)
|
|
|
|
message_id = self.send_stream_message(
|
|
|
|
hamlet, stream_name, topic_name="Topic1", content="Hello World"
|
|
|
|
)
|
|
|
|
|
|
|
|
self.subscribe(cordelia, stream_name)
|
|
|
|
self.login_user(cordelia)
|
|
|
|
self.subscribe(aaron, stream_name)
|
|
|
|
self.login_user(aaron)
|
2023-03-20 19:35:55 +01:00
|
|
|
self.subscribe(othello, stream_name)
|
|
|
|
self.login_user(othello)
|
2023-03-17 15:09:50 +01:00
|
|
|
|
2023-03-20 19:35:55 +01:00
|
|
|
# Initially, hamlet and othello set visibility_policy as UNMUTED for 'Topic1' and 'Topic2',
|
2023-03-17 15:09:50 +01:00
|
|
|
# cordelia sets visibility_policy as MUTED for 'Topic1' and 'Topic2', while
|
|
|
|
# aaron doesn't have a visibility_policy set for 'Topic1' or 'Topic2'.
|
|
|
|
#
|
|
|
|
# After moving messages from 'Topic1' to 'Topic 1 edited', the expected behaviour is:
|
2023-03-20 19:35:55 +01:00
|
|
|
# hamlet and othello have UNMUTED 'Topic 1 edited' and no visibility_policy set for 'Topic1'
|
2023-03-17 15:09:50 +01:00
|
|
|
# cordelia has MUTED 'Topic 1 edited' and no visibility_policy set for 'Topic1'
|
|
|
|
#
|
|
|
|
# There is no change in visibility_policy configurations for 'Topic2', i.e.
|
2023-03-20 19:35:55 +01:00
|
|
|
# hamlet and othello have UNMUTED 'Topic2' + cordelia has MUTED 'Topic2'
|
2023-03-17 15:09:50 +01:00
|
|
|
# aaron still doesn't have visibility_policy set for any topic.
|
2023-03-20 19:35:55 +01:00
|
|
|
#
|
|
|
|
# Note: We have used two users with UNMUTED 'Topic1' to verify that the query count
|
|
|
|
# doesn't increase (in order to update UserTopic records) with an increase in users.
|
|
|
|
# (We are using bulk database operations.)
|
|
|
|
# 1 query/user is added in order to send muted_topics event.(which will be deprecated)
|
2023-03-17 15:09:50 +01:00
|
|
|
topics = [
|
|
|
|
[stream_name, "Topic1"],
|
|
|
|
[stream_name, "Topic2"],
|
|
|
|
]
|
|
|
|
set_topic_visibility_policy(hamlet, topics, UserTopic.VisibilityPolicy.UNMUTED)
|
|
|
|
set_topic_visibility_policy(cordelia, topics, UserTopic.VisibilityPolicy.MUTED)
|
2023-03-20 19:35:55 +01:00
|
|
|
set_topic_visibility_policy(othello, topics, UserTopic.VisibilityPolicy.UNMUTED)
|
2023-03-17 15:09:50 +01:00
|
|
|
|
|
|
|
# users that need to be notified by send_event in the case of change-topic-name operation.
|
|
|
|
users_to_be_notified_via_muted_topics_event: List[int] = []
|
|
|
|
users_to_be_notified_via_user_topic_event: List[int] = []
|
|
|
|
for user_topic in get_users_with_user_topic_visibility_policy(stream.id, "Topic1"):
|
|
|
|
# We are appending the same data twice because 'user_topic' event notifies
|
|
|
|
# the user during delete and create operation.
|
|
|
|
users_to_be_notified_via_user_topic_event.append(user_topic.user_profile_id)
|
|
|
|
users_to_be_notified_via_user_topic_event.append(user_topic.user_profile_id)
|
|
|
|
# 'muted_topics' event notifies the user of muted topics during create
|
|
|
|
# operation only.
|
|
|
|
users_to_be_notified_via_muted_topics_event.append(user_topic.user_profile_id)
|
|
|
|
|
|
|
|
change_all_topic_name = "Topic 1 edited"
|
2023-04-09 20:58:00 +02:00
|
|
|
with self.assert_database_query_count(26):
|
2023-03-17 15:09:50 +01:00
|
|
|
check_update_message(
|
|
|
|
user_profile=hamlet,
|
|
|
|
message_id=message_id,
|
|
|
|
stream_id=None,
|
|
|
|
topic_name=change_all_topic_name,
|
|
|
|
propagate_mode="change_all",
|
|
|
|
send_notification_to_old_thread=False,
|
|
|
|
send_notification_to_new_thread=False,
|
|
|
|
content=None,
|
|
|
|
)
|
|
|
|
|
|
|
|
# Extract the send_event call where event type is 'user_topic' or 'muted_topics.
|
|
|
|
# Here we assert that the expected users are notified properly.
|
|
|
|
users_notified_via_muted_topics_event: List[int] = []
|
|
|
|
users_notified_via_user_topic_event: List[int] = []
|
|
|
|
for call_args in mock_send_event.call_args_list:
|
|
|
|
(arg_realm, arg_event, arg_notified_users) = call_args[0]
|
|
|
|
if arg_event["type"] == "user_topic":
|
|
|
|
users_notified_via_user_topic_event.append(*arg_notified_users)
|
|
|
|
elif arg_event["type"] == "muted_topics":
|
|
|
|
users_notified_via_muted_topics_event.append(*arg_notified_users)
|
|
|
|
self.assertEqual(
|
|
|
|
sorted(users_notified_via_muted_topics_event),
|
|
|
|
sorted(users_to_be_notified_via_muted_topics_event),
|
|
|
|
)
|
|
|
|
self.assertEqual(
|
|
|
|
sorted(users_notified_via_user_topic_event),
|
|
|
|
sorted(users_to_be_notified_via_user_topic_event),
|
|
|
|
)
|
|
|
|
|
2023-03-20 19:35:55 +01:00
|
|
|
# No visibility_policy set for 'Topic1'
|
2023-03-17 15:09:50 +01:00
|
|
|
assert_has_visibility_policy(
|
|
|
|
hamlet, "Topic1", UserTopic.VisibilityPolicy.UNMUTED, expected=False
|
|
|
|
)
|
|
|
|
assert_has_visibility_policy(
|
|
|
|
cordelia, "Topic1", UserTopic.VisibilityPolicy.MUTED, expected=False
|
|
|
|
)
|
2023-03-20 19:35:55 +01:00
|
|
|
assert_has_visibility_policy(
|
|
|
|
othello, "Topic1", UserTopic.VisibilityPolicy.UNMUTED, expected=False
|
|
|
|
)
|
2023-03-17 15:09:50 +01:00
|
|
|
assert_has_visibility_policy(
|
|
|
|
aaron, "Topic1", UserTopic.VisibilityPolicy.UNMUTED, expected=False
|
|
|
|
)
|
2023-03-20 19:35:55 +01:00
|
|
|
# No change in visibility_policy configurations for 'Topic2'
|
2023-03-17 15:09:50 +01:00
|
|
|
assert_has_visibility_policy(
|
|
|
|
hamlet, "Topic2", UserTopic.VisibilityPolicy.UNMUTED, expected=True
|
|
|
|
)
|
|
|
|
assert_has_visibility_policy(
|
|
|
|
cordelia, "Topic2", UserTopic.VisibilityPolicy.MUTED, expected=True
|
|
|
|
)
|
2023-03-20 19:35:55 +01:00
|
|
|
assert_has_visibility_policy(
|
|
|
|
othello, "Topic2", UserTopic.VisibilityPolicy.UNMUTED, expected=True
|
|
|
|
)
|
2023-03-17 15:09:50 +01:00
|
|
|
assert_has_visibility_policy(
|
|
|
|
aaron, "Topic2", UserTopic.VisibilityPolicy.UNMUTED, expected=False
|
|
|
|
)
|
2023-03-20 19:35:55 +01:00
|
|
|
# UserTopic records moved to 'Topic 1 edited' after move-topic operation.
|
2023-03-17 15:09:50 +01:00
|
|
|
assert_has_visibility_policy(
|
|
|
|
hamlet, change_all_topic_name, UserTopic.VisibilityPolicy.UNMUTED, expected=True
|
|
|
|
)
|
|
|
|
assert_has_visibility_policy(
|
|
|
|
cordelia, change_all_topic_name, UserTopic.VisibilityPolicy.MUTED, expected=True
|
|
|
|
)
|
2023-03-20 19:35:55 +01:00
|
|
|
assert_has_visibility_policy(
|
|
|
|
othello, change_all_topic_name, UserTopic.VisibilityPolicy.UNMUTED, expected=True
|
|
|
|
)
|
2023-03-17 15:09:50 +01:00
|
|
|
assert_has_visibility_policy(
|
|
|
|
aaron, change_all_topic_name, UserTopic.VisibilityPolicy.MUTED, expected=False
|
|
|
|
)
|
|
|
|
|
2023-04-09 20:58:00 +02:00
|
|
|
    def test_merge_user_topic_states_on_move_messages(self) -> None:
        stream_name = "Stream 123"
        stream = self.make_stream(stream_name)

        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        aaron = self.example_user("aaron")

        def assert_has_visibility_policy(
            user_profile: UserProfile,
            topic_name: str,
            visibility_policy: int,
        ) -> None:
            self.assertTrue(
                topic_has_visibility_policy(user_profile, stream.id, topic_name, visibility_policy)
            )

        self.subscribe(hamlet, stream_name)
        self.login_user(hamlet)
        self.subscribe(cordelia, stream_name)
        self.login_user(cordelia)
        self.subscribe(aaron, stream_name)
        self.login_user(aaron)

        # Test the following cases:
        #
        # orig_topic | target_topic | final behaviour
        #   INHERIT       INHERIT       INHERIT
        #   INHERIT       MUTED         INHERIT
        #   INHERIT       UNMUTED       UNMUTED
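        # Taken together, the three case tables in this test suggest the merge rule
        # applied when UserTopic rows collide: an UNMUTED policy on either topic wins,
        # MUTED survives only when both topics are MUTED, and otherwise the result
        # falls back to INHERIT.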
        orig_topic = "Topic1"
        target_topic = "Topic1 edited"
        orig_message_id = self.send_stream_message(
            hamlet, stream_name, topic_name=orig_topic, content="Hello World"
        )
        self.send_stream_message(
            hamlet, stream_name, topic_name=target_topic, content="Hello World 2"
        )

        # By default:
        # visibility_policy of 'hamlet', 'cordelia', 'aaron' for 'orig_topic': INHERIT
        # visibility_policy of 'hamlet' for 'target_topic': INHERIT
        #
        # So we don't need to manually set visibility_policy to INHERIT whenever required,
        # here and later in this test.
        do_set_user_topic_visibility_policy(
            cordelia, stream, target_topic, visibility_policy=UserTopic.VisibilityPolicy.MUTED
        )
        do_set_user_topic_visibility_policy(
            aaron, stream, target_topic, visibility_policy=UserTopic.VisibilityPolicy.UNMUTED
        )

        check_update_message(
            user_profile=hamlet,
            message_id=orig_message_id,
            stream_id=None,
            topic_name=target_topic,
            propagate_mode="change_all",
            send_notification_to_old_thread=False,
            send_notification_to_new_thread=False,
            content=None,
        )

        assert_has_visibility_policy(hamlet, orig_topic, UserTopic.VisibilityPolicy.INHERIT)
        assert_has_visibility_policy(cordelia, orig_topic, UserTopic.VisibilityPolicy.INHERIT)
        assert_has_visibility_policy(aaron, orig_topic, UserTopic.VisibilityPolicy.INHERIT)
        assert_has_visibility_policy(hamlet, target_topic, UserTopic.VisibilityPolicy.INHERIT)
        assert_has_visibility_policy(cordelia, target_topic, UserTopic.VisibilityPolicy.INHERIT)
        assert_has_visibility_policy(aaron, target_topic, UserTopic.VisibilityPolicy.UNMUTED)

        # Test the following cases:
        #
        # orig_topic | target_topic | final behaviour
        #   MUTED         INHERIT       INHERIT
        #   MUTED         MUTED         MUTED
        #   MUTED         UNMUTED       UNMUTED
        orig_topic = "Topic2"
        target_topic = "Topic2 edited"
        orig_message_id = self.send_stream_message(
            hamlet, stream_name, topic_name=orig_topic, content="Hello World"
        )
        self.send_stream_message(
            hamlet, stream_name, topic_name=target_topic, content="Hello World 2"
        )

        do_set_user_topic_visibility_policy(
            hamlet, stream, orig_topic, visibility_policy=UserTopic.VisibilityPolicy.MUTED
        )
        do_set_user_topic_visibility_policy(
            cordelia, stream, orig_topic, visibility_policy=UserTopic.VisibilityPolicy.MUTED
        )
        do_set_user_topic_visibility_policy(
            aaron, stream, orig_topic, visibility_policy=UserTopic.VisibilityPolicy.MUTED
        )
        do_set_user_topic_visibility_policy(
            cordelia, stream, target_topic, visibility_policy=UserTopic.VisibilityPolicy.MUTED
        )
        do_set_user_topic_visibility_policy(
            aaron, stream, target_topic, visibility_policy=UserTopic.VisibilityPolicy.UNMUTED
        )

        check_update_message(
            user_profile=hamlet,
            message_id=orig_message_id,
            stream_id=None,
            topic_name=target_topic,
            propagate_mode="change_all",
            send_notification_to_old_thread=False,
            send_notification_to_new_thread=False,
            content=None,
        )

        assert_has_visibility_policy(hamlet, orig_topic, UserTopic.VisibilityPolicy.INHERIT)
        assert_has_visibility_policy(cordelia, orig_topic, UserTopic.VisibilityPolicy.INHERIT)
        assert_has_visibility_policy(aaron, orig_topic, UserTopic.VisibilityPolicy.INHERIT)
        assert_has_visibility_policy(hamlet, target_topic, UserTopic.VisibilityPolicy.INHERIT)
        assert_has_visibility_policy(cordelia, target_topic, UserTopic.VisibilityPolicy.MUTED)
        assert_has_visibility_policy(aaron, target_topic, UserTopic.VisibilityPolicy.UNMUTED)

        # Test the following cases:
        #
        # orig_topic | target_topic | final behaviour
        #   UNMUTED       INHERIT       UNMUTED
        #   UNMUTED       MUTED         UNMUTED
        #   UNMUTED       UNMUTED       UNMUTED
        orig_topic = "Topic3"
        target_topic = "Topic3 edited"
        orig_message_id = self.send_stream_message(
            hamlet, stream_name, topic_name=orig_topic, content="Hello World"
        )
        self.send_stream_message(
            hamlet, stream_name, topic_name=target_topic, content="Hello World 2"
        )

        do_set_user_topic_visibility_policy(
            hamlet, stream, orig_topic, visibility_policy=UserTopic.VisibilityPolicy.UNMUTED
        )
        do_set_user_topic_visibility_policy(
            cordelia, stream, orig_topic, visibility_policy=UserTopic.VisibilityPolicy.UNMUTED
        )
        do_set_user_topic_visibility_policy(
            aaron, stream, orig_topic, visibility_policy=UserTopic.VisibilityPolicy.UNMUTED
        )
        do_set_user_topic_visibility_policy(
            cordelia, stream, target_topic, visibility_policy=UserTopic.VisibilityPolicy.MUTED
        )
        do_set_user_topic_visibility_policy(
            aaron, stream, target_topic, visibility_policy=UserTopic.VisibilityPolicy.UNMUTED
        )

        check_update_message(
            user_profile=hamlet,
            message_id=orig_message_id,
            stream_id=None,
            topic_name=target_topic,
            propagate_mode="change_all",
            send_notification_to_old_thread=False,
            send_notification_to_new_thread=False,
            content=None,
        )

        assert_has_visibility_policy(hamlet, orig_topic, UserTopic.VisibilityPolicy.INHERIT)
        assert_has_visibility_policy(cordelia, orig_topic, UserTopic.VisibilityPolicy.INHERIT)
        assert_has_visibility_policy(aaron, orig_topic, UserTopic.VisibilityPolicy.INHERIT)
        assert_has_visibility_policy(hamlet, target_topic, UserTopic.VisibilityPolicy.UNMUTED)
        assert_has_visibility_policy(cordelia, target_topic, UserTopic.VisibilityPolicy.UNMUTED)
        assert_has_visibility_policy(aaron, target_topic, UserTopic.VisibilityPolicy.UNMUTED)

    def test_user_topic_states_on_moving_to_topic_with_no_messages(self) -> None:
        stream_name = "Stream 123"
        stream = self.make_stream(stream_name)

        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        aaron = self.example_user("aaron")

        self.subscribe(hamlet, stream_name)
        self.subscribe(cordelia, stream_name)
        self.subscribe(aaron, stream_name)

        def assert_has_visibility_policy(
            user_profile: UserProfile,
            topic_name: str,
            visibility_policy: int,
        ) -> None:
            self.assertTrue(
                topic_has_visibility_policy(user_profile, stream.id, topic_name, visibility_policy)
            )

        # Test the case where the target topic has no messages:
        #
        # orig_topic | final behaviour
        #   INHERIT       INHERIT
        #   UNMUTED       UNMUTED
        #   MUTED         MUTED

        orig_topic = "Topic1"
        target_topic = "Topic1 edited"
        orig_message_id = self.send_stream_message(
            hamlet, stream_name, topic_name=orig_topic, content="Hello World"
        )

        do_set_user_topic_visibility_policy(
            hamlet, stream, orig_topic, visibility_policy=UserTopic.VisibilityPolicy.UNMUTED
        )
        do_set_user_topic_visibility_policy(
            cordelia, stream, orig_topic, visibility_policy=UserTopic.VisibilityPolicy.MUTED
        )

        check_update_message(
            user_profile=hamlet,
            message_id=orig_message_id,
            stream_id=None,
            topic_name=target_topic,
            propagate_mode="change_all",
            send_notification_to_old_thread=False,
            send_notification_to_new_thread=False,
            content=None,
        )

        assert_has_visibility_policy(hamlet, orig_topic, UserTopic.VisibilityPolicy.INHERIT)
        assert_has_visibility_policy(cordelia, orig_topic, UserTopic.VisibilityPolicy.INHERIT)
        assert_has_visibility_policy(aaron, orig_topic, UserTopic.VisibilityPolicy.INHERIT)
        assert_has_visibility_policy(hamlet, target_topic, UserTopic.VisibilityPolicy.UNMUTED)
        assert_has_visibility_policy(cordelia, target_topic, UserTopic.VisibilityPolicy.MUTED)
        assert_has_visibility_policy(aaron, target_topic, UserTopic.VisibilityPolicy.INHERIT)

        def test_user_topic_state_for_messages_deleted_from_target_topic(
            orig_topic: str, target_topic: str, original_topic_state: int
        ) -> None:
            # Test the case where the target topic has no messages but still has
            # UserTopic rows, because messages were deleted from the target topic.
            orig_message_id = self.send_stream_message(
                hamlet, stream_name, topic_name=orig_topic, content="Hello World"
            )
            target_message_id = self.send_stream_message(
                hamlet, stream_name, topic_name=target_topic, content="Hello World"
            )

            if original_topic_state != UserTopic.VisibilityPolicy.INHERIT:
                users = [hamlet, cordelia, aaron]
                for user in users:
                    do_set_user_topic_visibility_policy(
                        user, stream, orig_topic, visibility_policy=original_topic_state
                    )

            do_set_user_topic_visibility_policy(
                hamlet, stream, target_topic, visibility_policy=UserTopic.VisibilityPolicy.UNMUTED
            )
            do_set_user_topic_visibility_policy(
                cordelia, stream, target_topic, visibility_policy=UserTopic.VisibilityPolicy.MUTED
            )

            # Delete the message in the target topic to make it empty.
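            # (Members are granted delete_own_message_policy below so that hamlet,
            # a regular member, is actually allowed to delete his own message here.)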
            self.login("hamlet")
            do_set_realm_property(
                hamlet.realm,
                "delete_own_message_policy",
                Realm.POLICY_MEMBERS_ONLY,
                acting_user=None,
            )
            self.client_delete(f"/json/messages/{target_message_id}")

            check_update_message(
                user_profile=hamlet,
                message_id=orig_message_id,
                stream_id=None,
                topic_name=target_topic,
                propagate_mode="change_all",
                send_notification_to_old_thread=False,
                send_notification_to_new_thread=False,
                content=None,
            )

            assert_has_visibility_policy(hamlet, orig_topic, UserTopic.VisibilityPolicy.INHERIT)
            assert_has_visibility_policy(cordelia, orig_topic, UserTopic.VisibilityPolicy.INHERIT)
            assert_has_visibility_policy(aaron, orig_topic, UserTopic.VisibilityPolicy.INHERIT)
            assert_has_visibility_policy(hamlet, target_topic, original_topic_state)
            assert_has_visibility_policy(cordelia, target_topic, original_topic_state)
            assert_has_visibility_policy(aaron, target_topic, original_topic_state)

        # orig_topic | target_topic | final behaviour
        #   INHERIT       INHERIT       INHERIT
        #   INHERIT       UNMUTED       INHERIT
        #   INHERIT       MUTED         INHERIT
        test_user_topic_state_for_messages_deleted_from_target_topic(
            orig_topic="Topic2",
            target_topic="Topic2 edited",
            original_topic_state=UserTopic.VisibilityPolicy.INHERIT,
        )

        # orig_topic | target_topic | final behaviour
        #   MUTED         INHERIT       MUTED
        #   MUTED         UNMUTED       MUTED
        #   MUTED         MUTED         MUTED
        test_user_topic_state_for_messages_deleted_from_target_topic(
            orig_topic="Topic3",
            target_topic="Topic3 edited",
            original_topic_state=UserTopic.VisibilityPolicy.MUTED,
        )

        # orig_topic | target_topic | final behaviour
        #   UNMUTED       INHERIT       UNMUTED
        #   UNMUTED       UNMUTED       UNMUTED
        #   UNMUTED       MUTED         UNMUTED
        test_user_topic_state_for_messages_deleted_from_target_topic(
            orig_topic="Topic4",
            target_topic="Topic4 edited",
            original_topic_state=UserTopic.VisibilityPolicy.UNMUTED,
        )

@mock.patch("zerver.actions.message_edit.send_event")
|
2020-07-03 11:09:37 +02:00
|
|
|
def test_wildcard_mention(self, mock_send_event: mock.MagicMock) -> None:
|
|
|
|
stream_name = "Macbeth"
|
|
|
|
hamlet = self.example_user("hamlet")
|
|
|
|
cordelia = self.example_user("cordelia")
|
|
|
|
self.make_stream(stream_name, history_public_to_subscribers=True)
|
|
|
|
self.subscribe(hamlet, stream_name)
|
|
|
|
self.subscribe(cordelia, stream_name)
|
|
|
|
self.login_user(hamlet)
|
|
|
|
message_id = self.send_stream_message(hamlet, stream_name, "Hello everyone")
|
|
|
|
|
|
|
|
def notify(user_id: int) -> Dict[str, Any]:
|
|
|
|
return {
|
|
|
|
"id": user_id,
|
|
|
|
"flags": ["wildcard_mentioned"],
|
|
|
|
}
|
|
|
|
|
|
|
|
users_to_be_notified = sorted(map(notify, [cordelia.id, hamlet.id]), key=itemgetter("id"))
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{message_id}",
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"content": "Hello @**everyone**",
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
# Extract the send_event call where event type is 'update_message'.
|
|
|
|
# Here we assert wildcard_mention_user_ids has been set properly.
|
|
|
|
called = False
|
|
|
|
for call_args in mock_send_event.call_args_list:
|
|
|
|
(arg_realm, arg_event, arg_notified_users) = call_args[0]
|
2021-02-12 08:20:45 +01:00
|
|
|
if arg_event["type"] == "update_message":
|
|
|
|
self.assertEqual(arg_event["type"], "update_message")
|
|
|
|
self.assertEqual(arg_event["wildcard_mention_user_ids"], [cordelia.id, hamlet.id])
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
|
|
|
sorted(arg_notified_users, key=itemgetter("id")), users_to_be_notified
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
called = True
|
|
|
|
self.assertTrue(called)
|
|
|
|
|
2021-12-06 18:40:30 +01:00
|
|
|
    def test_wildcard_mention_restrictions_when_editing(self) -> None:
        cordelia = self.example_user("cordelia")
        shiva = self.example_user("shiva")
        self.login("cordelia")
        stream_name = "Macbeth"
        self.make_stream(stream_name, history_public_to_subscribers=True)
        self.subscribe(cordelia, stream_name)
        self.subscribe(shiva, stream_name)
        message_id = self.send_stream_message(cordelia, stream_name, "Hello everyone")

        realm = cordelia.realm
        do_set_realm_property(
            realm,
            "wildcard_mention_policy",
            Realm.WILDCARD_MENTION_POLICY_MODERATORS,
            acting_user=None,
        )
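        # With the policy set to moderators only, a non-moderator like cordelia should
        # not be able to add a wildcard mention while editing when the stream is large;
        # the mocked subscriber counts of 17 and 14 presumably sit on either side of the
        # threshold above which the policy is enforced.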
        with mock.patch("zerver.lib.message.num_subscribers_for_stream_id", return_value=17):
            result = self.client_patch(
                "/json/messages/" + str(message_id),
                {
                    "content": "Hello @**everyone**",
                },
            )
        self.assert_json_error(
            result, "You do not have permission to use wildcard mentions in this stream."
        )

        with mock.patch("zerver.lib.message.num_subscribers_for_stream_id", return_value=14):
            result = self.client_patch(
                "/json/messages/" + str(message_id),
                {
                    "content": "Hello @**everyone**",
                },
            )
        self.assert_json_success(result)

        self.login("shiva")
        message_id = self.send_stream_message(shiva, stream_name, "Hi everyone")
        with mock.patch("zerver.lib.message.num_subscribers_for_stream_id", return_value=17):
            result = self.client_patch(
                "/json/messages/" + str(message_id),
                {
                    "content": "Hello @**everyone**",
                },
            )
        self.assert_json_success(result)

    def test_user_group_mention_restrictions_while_editing(self) -> None:
        iago = self.example_user("iago")
        shiva = self.example_user("shiva")
        cordelia = self.example_user("cordelia")
        othello = self.example_user("othello")
        self.subscribe(iago, "test_stream")
        self.subscribe(shiva, "test_stream")
        self.subscribe(othello, "test_stream")
        self.subscribe(cordelia, "test_stream")

        leadership = check_add_user_group(othello.realm, "leadership", [othello], acting_user=None)
        support = check_add_user_group(othello.realm, "support", [othello], acting_user=None)

        moderators_system_group = UserGroup.objects.get(
            realm=iago.realm, name=UserGroup.MODERATORS_GROUP_NAME, is_system_group=True
        )

        self.login("cordelia")
        msg_id = self.send_stream_message(cordelia, "test_stream", "Test message")
        content = "Edited test message @*leadership*"
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_success(result)

        leadership.can_mention_group = moderators_system_group
        leadership.save()
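        # From this point on, only members of the moderators system group may
        # (non-silently) mention @*leadership*.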

        msg_id = self.send_stream_message(cordelia, "test_stream", "Test message")
        content = "Edited test message @*leadership*"
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_error(
            result,
            f"You are not allowed to mention user group '{leadership.name}'. You must be a member of '{moderators_system_group.name}' to mention this group.",
        )

        # The restriction does not apply to silent mentions.
        msg_id = self.send_stream_message(cordelia, "test_stream", "Test message")
        content = "Edited test message @_*leadership*"
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_success(result)

        self.login("shiva")
        content = "Edited test message @*leadership*"
        msg_id = self.send_stream_message(shiva, "test_stream", "Test message")
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_success(result)

        self.login("iago")
        msg_id = self.send_stream_message(iago, "test_stream", "Test message")
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_success(result)

        test = check_add_user_group(shiva.realm, "test", [shiva], acting_user=None)
        add_subgroups_to_user_group(leadership, [test], acting_user=None)
        support.can_mention_group = leadership
        support.save()

        content = "Test mentioning user group @*support*"
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_error(
            result,
            f"You are not allowed to mention user group '{support.name}'. You must be a member of '{leadership.name}' to mention this group.",
        )

        msg_id = self.send_stream_message(othello, "test_stream", "Test message")
        self.login("othello")
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_success(result)

        msg_id = self.send_stream_message(shiva, "test_stream", "Test message")
        self.login("shiva")
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_success(result)

        msg_id = self.send_stream_message(iago, "test_stream", "Test message")
        content = "Test mentioning user group @*support* @*leadership*"

        self.login("iago")
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_error(
            result,
            f"You are not allowed to mention user group '{support.name}'. You must be a member of '{leadership.name}' to mention this group.",
        )

        msg_id = self.send_stream_message(othello, "test_stream", "Test message")
        self.login("othello")
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_error(
            result,
            f"You are not allowed to mention user group '{leadership.name}'. You must be a member of '{moderators_system_group.name}' to mention this group.",
        )

        msg_id = self.send_stream_message(shiva, "test_stream", "Test message")
        self.login("shiva")
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "content": content,
            },
        )
        self.assert_json_success(result)

    def test_topic_edit_history_saved_in_all_message(self) -> None:
        self.login("hamlet")
        id1 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic1")
        id2 = self.send_stream_message(self.example_user("iago"), "Denmark", topic_name="topic1")
        id3 = self.send_stream_message(self.example_user("iago"), "Verona", topic_name="topic1")
        id4 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic2")
        id5 = self.send_stream_message(self.example_user("iago"), "Denmark", topic_name="topic1")

        def verify_edit_history(new_topic: str, len_edit_history: int) -> None:
            for msg_id in [id1, id2, id5]:
                msg = Message.objects.get(id=msg_id)

                self.assertEqual(
                    new_topic,
                    msg.topic_name(),
                )
                # Since the edit history is generated by do_update_message,
                # its contents can vary over time; so, to keep this test
                # future-proof, we only verify its length.
                self.assert_length(
                    orjson.loads(assert_is_not_none(msg.edit_history)), len_edit_history
                )

            for msg_id in [id3, id4]:
                msg = Message.objects.get(id=msg_id)
                self.assertEqual(msg.edit_history, None)

        new_topic = "edited"
        result = self.client_patch(
            f"/json/messages/{id1}",
            {
                "topic": new_topic,
                "propagate_mode": "change_later",
            },
        )

        self.assert_json_success(result)
        verify_edit_history(new_topic, 1)

        new_topic = "edited2"
        result = self.client_patch(
            f"/json/messages/{id1}",
            {
                "topic": new_topic,
                "propagate_mode": "change_later",
            },
        )

        self.assert_json_success(result)
        verify_edit_history(new_topic, 2)

    def test_topic_and_content_edit(self) -> None:
        self.login("hamlet")
        id1 = self.send_stream_message(self.example_user("hamlet"), "Denmark", "message 1", "topic")
        id2 = self.send_stream_message(self.example_user("iago"), "Denmark", "message 2", "topic")
        id3 = self.send_stream_message(self.example_user("hamlet"), "Denmark", "message 3", "topic")

        new_topic = "edited"
        result = self.client_patch(
            "/json/messages/" + str(id1),
            {
                "topic": new_topic,
                "propagate_mode": "change_later",
                "content": "edited message",
            },
        )

        self.assert_json_success(result)

        # Only id1's edit history should record the content change, while the
        # topic change should be present in all three messages.
        msg1 = Message.objects.get(id=id1)
        msg2 = Message.objects.get(id=id2)
        msg3 = Message.objects.get(id=id3)

        msg1_edit_history = orjson.loads(assert_is_not_none(msg1.edit_history))
        self.assertTrue("prev_content" in msg1_edit_history[0])

        for msg in [msg2, msg3]:
            self.assertFalse(
                "prev_content" in orjson.loads(assert_is_not_none(msg.edit_history))[0]
            )

        for msg in [msg1, msg2, msg3]:
            self.assertEqual(
                new_topic,
                msg.topic_name(),
            )
            self.assert_length(orjson.loads(assert_is_not_none(msg.edit_history)), 1)

    def test_propagate_topic_forward(self) -> None:
        self.login("hamlet")
        id1 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic1")
        id2 = self.send_stream_message(self.example_user("iago"), "Denmark", topic_name="topic1")
        id3 = self.send_stream_message(self.example_user("iago"), "Verona", topic_name="topic1")
        id4 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic2")
        id5 = self.send_stream_message(self.example_user("iago"), "Denmark", topic_name="topic1")

        result = self.client_patch(
            f"/json/messages/{id1}",
            {
                "topic": "edited",
                "propagate_mode": "change_later",
            },
        )
        self.assert_json_success(result)

        self.check_topic(id1, topic_name="edited")
        self.check_topic(id2, topic_name="edited")
        self.check_topic(id3, topic_name="topic1")
        self.check_topic(id4, topic_name="topic2")
        self.check_topic(id5, topic_name="edited")

    def test_propagate_all_topics(self) -> None:
        self.login("hamlet")
        id1 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic1")
        id2 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic1")
        id3 = self.send_stream_message(self.example_user("iago"), "Verona", topic_name="topic1")
        id4 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic2")
        id5 = self.send_stream_message(self.example_user("iago"), "Denmark", topic_name="topic1")
        id6 = self.send_stream_message(self.example_user("iago"), "Denmark", topic_name="topic3")

        result = self.client_patch(
            f"/json/messages/{id2}",
            {
                "topic": "edited",
                "propagate_mode": "change_all",
            },
        )
        self.assert_json_success(result)

        self.check_topic(id1, topic_name="edited")
        self.check_topic(id2, topic_name="edited")
        self.check_topic(id3, topic_name="topic1")
        self.check_topic(id4, topic_name="topic2")
        self.check_topic(id5, topic_name="edited")
        self.check_topic(id6, topic_name="topic3")

    def test_propagate_all_topics_with_different_uppercase_letters(self) -> None:
        self.login("hamlet")
        id1 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="topic1")
        id2 = self.send_stream_message(self.example_user("hamlet"), "Denmark", topic_name="Topic1")
        id3 = self.send_stream_message(self.example_user("iago"), "Verona", topic_name="topiC1")
        id4 = self.send_stream_message(self.example_user("iago"), "Denmark", topic_name="toPic1")

        result = self.client_patch(
            f"/json/messages/{id2}",
            {
                "topic": "edited",
                "propagate_mode": "change_all",
            },
        )
        self.assert_json_success(result)

        self.check_topic(id1, topic_name="edited")
        self.check_topic(id2, topic_name="edited")
        self.check_topic(id3, topic_name="topiC1")
        self.check_topic(id4, topic_name="edited")

    def test_change_all_propagate_mode_for_moving_old_messages(self) -> None:
        user_profile = self.example_user("hamlet")
        id1 = self.send_stream_message(user_profile, "Denmark", topic_name="topic1")
        id2 = self.send_stream_message(user_profile, "Denmark", topic_name="topic1")
        id3 = self.send_stream_message(user_profile, "Denmark", topic_name="topic1")
        id4 = self.send_stream_message(user_profile, "Denmark", topic_name="topic1")
        self.send_stream_message(user_profile, "Denmark", topic_name="topic1")

        do_set_realm_property(
            user_profile.realm,
            "move_messages_between_streams_policy",
            Realm.POLICY_MEMBERS_ONLY,
            acting_user=None,
        )

        message = Message.objects.get(id=id1)
        message.date_sent = message.date_sent - datetime.timedelta(days=10)
        message.save()

        message = Message.objects.get(id=id2)
        message.date_sent = message.date_sent - datetime.timedelta(days=8)
        message.save()

        message = Message.objects.get(id=id3)
        message.date_sent = message.date_sent - datetime.timedelta(days=5)
        message.save()
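        # Backdating id1-id3 pushes the two oldest messages outside the realm's time
        # limit for moving messages within a stream (presumably the one-week default),
        # so only the three most recent messages in the topic remain movable by a
        # regular member; the "3/5 most recent messages" error below relies on this.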
verona = get_stream("Verona", user_profile.realm)
|
|
|
|
denmark = get_stream("Denmark", user_profile.realm)
|
|
|
|
old_topic = "topic1"
|
|
|
|
old_stream = denmark
|
|
|
|
|
|
|
|
def test_moving_all_topic_messages(
|
|
|
|
new_topic: Optional[str] = None, new_stream: Optional[Stream] = None
|
|
|
|
) -> None:
|
|
|
|
self.login("hamlet")
|
|
|
|
params_dict: Dict[str, Union[str, int]] = {
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
"send_notification_to_new_thread": "false",
|
|
|
|
}
|
|
|
|
|
|
|
|
if new_topic is not None:
|
|
|
|
params_dict["topic"] = new_topic
|
|
|
|
else:
|
|
|
|
new_topic = old_topic
|
|
|
|
|
|
|
|
if new_stream is not None:
|
|
|
|
params_dict["stream_id"] = new_stream.id
|
|
|
|
else:
|
|
|
|
new_stream = old_stream
|
|
|
|
|
|
|
|
result = self.client_patch(
|
|
|
|
f"/json/messages/{id4}",
|
|
|
|
params_dict,
|
|
|
|
)
|
|
|
|
self.assert_json_error(
|
|
|
|
result,
|
|
|
|
"You only have permission to move the 3/5 most recent messages in this topic.",
|
|
|
|
)
|
|
|
|
# Check message count in old topic and/or stream.
|
|
|
|
messages = get_topic_messages(user_profile, old_stream, old_topic)
|
|
|
|
self.assert_length(messages, 5)
|
|
|
|
|
|
|
|
# Check message count in new topic and/or stream.
|
|
|
|
messages = get_topic_messages(user_profile, new_stream, new_topic)
|
|
|
|
self.assert_length(messages, 0)
|
|
|
|
|
|
|
|
json = orjson.loads(result.content)
|
|
|
|
first_message_id_allowed_to_move = json["first_message_id_allowed_to_move"]
|
|
|
|
|
|
|
|
params_dict["propagate_mode"] = "change_later"
|
|
|
|
result = self.client_patch(
|
|
|
|
f"/json/messages/{first_message_id_allowed_to_move}",
|
|
|
|
params_dict,
|
|
|
|
)
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
# Check message count in old topic and/or stream.
|
|
|
|
messages = get_topic_messages(user_profile, old_stream, old_topic)
|
|
|
|
self.assert_length(messages, 2)
|
|
|
|
|
|
|
|
# Check message count in new topic and/or stream.
|
|
|
|
messages = get_topic_messages(user_profile, new_stream, new_topic)
|
|
|
|
self.assert_length(messages, 3)
|
|
|
|
|
|
|
|
self.login("shiva")
|
|
|
|
# Move these messages to the original topic and stream, to test the case
|
|
|
|
# when user is moderator.
|
|
|
|
result = self.client_patch(
|
|
|
|
f"/json/messages/{id4}",
|
|
|
|
{
|
|
|
|
"topic": old_topic,
|
|
|
|
"stream_id": old_stream.id,
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
"send_notification_to_new_thread": "false",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
|
|
|
params_dict["propagate_mode"] = "change_all"
|
|
|
|
result = self.client_patch(
|
|
|
|
f"/json/messages/{id4}",
|
|
|
|
params_dict,
|
|
|
|
)
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
# Check message count in old topic and/or stream.
|
|
|
|
messages = get_topic_messages(user_profile, old_stream, old_topic)
|
|
|
|
self.assert_length(messages, 0)
|
|
|
|
|
|
|
|
# Check message count in new topic and/or stream.
|
|
|
|
messages = get_topic_messages(user_profile, new_stream, new_topic)
|
|
|
|
self.assert_length(messages, 5)
|
|
|
|
|
|
|
|
# Test the case where only the topic is edited.
|
|
|
|
test_moving_all_topic_messages(new_topic="topic edited")
|
|
|
|
|
|
|
|
# Move these messages to the original topic to test the next case.
|
|
|
|
self.client_patch(
|
|
|
|
f"/json/messages/{id4}",
|
|
|
|
{
|
|
|
|
"topic": old_topic,
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
"send_notification_to_new_thread": "false",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
|
|
|
# Test the case where only the stream is edited.
|
|
|
|
test_moving_all_topic_messages(new_stream=verona)
|
|
|
|
|
|
|
|
# Move these messages to the original stream to test the next case.
|
|
|
|
self.client_patch(
|
|
|
|
f"/json/messages/{id4}",
|
|
|
|
{
|
|
|
|
"stream_id": denmark.id,
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
"send_notification_to_new_thread": "false",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
|
|
|
# Set the time limit for moving messages between streams to 2 weeks.
|
|
|
|
do_set_realm_property(
|
|
|
|
user_profile.realm,
|
|
|
|
"move_messages_between_streams_limit_seconds",
|
|
|
|
604800 * 2,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
|
|
|
|
# Test editing both topic and stream together.
|
|
|
|
test_moving_all_topic_messages(new_topic="edited", new_stream=verona)
|
|
|
|
|
2023-05-09 20:22:54 +02:00
|
|
|
# Move these messages to the original stream and topic to test the next case.
|
|
|
|
self.client_patch(
|
|
|
|
f"/json/messages/{id4}",
|
|
|
|
{
|
|
|
|
"stream_id": denmark.id,
|
|
|
|
"topic": old_topic,
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
"send_notification_to_new_thread": "false",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
|
|
|
# Test editing both topic and stream with no limit set.
|
|
|
|
self.login("hamlet")
|
|
|
|
do_set_realm_property(
|
|
|
|
user_profile.realm,
|
|
|
|
"move_messages_within_stream_limit_seconds",
|
|
|
|
None,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
do_set_realm_property(
|
|
|
|
user_profile.realm,
|
|
|
|
"move_messages_between_streams_limit_seconds",
|
|
|
|
None,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
|
|
|
|
new_stream = verona
|
|
|
|
new_topic = "edited"
|
|
|
|
result = self.client_patch(
|
|
|
|
f"/json/messages/{id4}",
|
|
|
|
{
|
|
|
|
"topic": new_topic,
|
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
"send_notification_to_new_thread": "false",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
self.assert_json_success(result)
|
|
|
|
# Check message count in old topic and/or stream.
|
|
|
|
messages = get_topic_messages(user_profile, old_stream, old_topic)
|
|
|
|
self.assert_length(messages, 0)
|
|
|
|
|
|
|
|
# Check message count in new topic and/or stream.
|
|
|
|
messages = get_topic_messages(user_profile, new_stream, new_topic)
|
|
|
|
self.assert_length(messages, 5)
|
|
|
|
|
2022-10-14 12:18:37 +02:00
|
|
|
def test_change_all_propagate_mode_for_moving_from_stream_with_restricted_history(self) -> None:
|
|
|
|
self.make_stream("privatestream", invite_only=True, history_public_to_subscribers=False)
|
|
|
|
iago = self.example_user("iago")
|
|
|
|
cordelia = self.example_user("cordelia")
|
|
|
|
self.subscribe(iago, "privatestream")
|
|
|
|
self.subscribe(cordelia, "privatestream")
|
|
|
|
id1 = self.send_stream_message(iago, "privatestream", topic_name="topic1")
|
|
|
|
id2 = self.send_stream_message(iago, "privatestream", topic_name="topic1")
|
|
|
|
|
|
|
|
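# hamlet subscribes only now; with protected history he can only access the
# messages sent from this point on (id3, id4, and the final message).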
hamlet = self.example_user("hamlet")
|
|
|
|
self.subscribe(hamlet, "privatestream")
|
|
|
|
id3 = self.send_stream_message(iago, "privatestream", topic_name="topic1")
|
|
|
|
id4 = self.send_stream_message(hamlet, "privatestream", topic_name="topic1")
|
|
|
|
self.send_stream_message(hamlet, "privatestream", topic_name="topic1")
|
|
|
|
|
|
|
|
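# Backdate the messages so that id1-id3 fall outside the one-week move limit
# while id4 (and the newest message) remain within it.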
message = Message.objects.get(id=id1)
|
|
|
|
message.date_sent = message.date_sent - datetime.timedelta(days=10)
|
|
|
|
message.save()
|
|
|
|
|
|
|
|
message = Message.objects.get(id=id2)
|
|
|
|
message.date_sent = message.date_sent - datetime.timedelta(days=9)
|
|
|
|
message.save()
|
|
|
|
|
|
|
|
message = Message.objects.get(id=id3)
|
|
|
|
message.date_sent = message.date_sent - datetime.timedelta(days=8)
|
|
|
|
message.save()
|
|
|
|
|
|
|
|
message = Message.objects.get(id=id4)
|
|
|
|
message.date_sent = message.date_sent - datetime.timedelta(days=6)
|
|
|
|
message.save()
|
|
|
|
|
|
|
|
self.login("hamlet")
|
|
|
|
result = self.client_patch(
|
|
|
|
f"/json/messages/{id4}",
|
|
|
|
{
|
|
|
|
"topic": "edited",
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
"send_notification_to_new_thread": "false",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
self.assert_json_error(
|
|
|
|
result,
|
|
|
|
"You only have permission to move the 2/3 most recent messages in this topic.",
|
|
|
|
)
|
|
|
|
|
|
|
|
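# cordelia has access to the full history, so the denominator counts all five messages.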
self.login("cordelia")
|
|
|
|
result = self.client_patch(
|
|
|
|
f"/json/messages/{id4}",
|
|
|
|
{
|
|
|
|
"topic": "edited",
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
"send_notification_to_new_thread": "false",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
self.assert_json_error(
|
|
|
|
result,
|
|
|
|
"You only have permission to move the 2/5 most recent messages in this topic.",
|
|
|
|
)
|
|
|
|
|
2020-07-03 11:09:37 +02:00
|
|
|
def test_move_message_to_stream(self) -> None:
|
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_lt) = self.prepare_move_topics(
|
2021-10-04 22:04:38 +02:00
|
|
|
"iago",
|
|
|
|
"test move stream",
|
|
|
|
"new stream",
|
|
|
|
"test",
|
|
|
|
# Set the user's translation language to German to test that
|
|
|
|
# it is overridden by the realm's default language.
|
|
|
|
"de",
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{msg_id}",
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"propagate_mode": "change_all",
|
2022-07-17 13:34:04 +02:00
|
|
|
"send_notification_to_old_thread": "true",
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
2021-10-04 22:04:38 +02:00
|
|
|
HTTP_ACCEPT_LANGUAGE="de",
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
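# The old topic should now contain only the breadcrumb notification, rendered in the
# realm's default language (English) rather than the requester's German.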
messages = get_topic_messages(user_profile, old_stream, "test")
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(messages, 1)
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
|
|
|
messages[0].content,
|
2022-01-15 22:33:32 +01:00
|
|
|
f"This topic was moved to #**new stream>test** by @_**Iago|{user_profile.id}**.",
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, new_stream, "test")
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(messages, 4)
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
|
|
|
messages[3].content,
|
2022-01-15 22:33:32 +01:00
|
|
|
f"This topic was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**.",
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2022-10-22 05:32:52 +02:00
|
|
|
def test_move_message_to_preexisting_topic(self) -> None:
|
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_lt) = self.prepare_move_topics(
|
|
|
|
"iago",
|
|
|
|
"test move stream",
|
|
|
|
"new stream",
|
|
|
|
"test",
|
|
|
|
# Set the user's translation language to German to test that
|
|
|
|
# it is overridden by the realm's default language.
|
|
|
|
"de",
|
|
|
|
)
|
|
|
|
|
|
|
|
self.send_stream_message(
|
|
|
|
sender=self.example_user("iago"),
|
|
|
|
stream_name="new stream",
|
|
|
|
topic_name="test",
|
|
|
|
content="Always here",
|
|
|
|
)
|
|
|
|
|
|
|
|
result = self.client_patch(
|
|
|
|
f"/json/messages/{msg_id}",
|
|
|
|
{
|
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
"send_notification_to_old_thread": "true",
|
|
|
|
},
|
|
|
|
HTTP_ACCEPT_LANGUAGE="de",
|
|
|
|
)
|
|
|
|
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, old_stream, "test")
|
|
|
|
self.assert_length(messages, 1)
|
|
|
|
self.assertEqual(
|
|
|
|
messages[0].content,
|
|
|
|
f"This topic was moved to #**new stream>test** by @_**Iago|{user_profile.id}**.",
|
|
|
|
)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, new_stream, "test")
|
|
|
|
self.assert_length(messages, 5)
|
|
|
|
self.assertEqual(
|
|
|
|
messages[4].content,
|
|
|
|
f"3 messages were moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**.",
|
|
|
|
)
|
|
|
|
|
2021-04-09 15:31:07 +02:00
|
|
|
def test_move_message_realm_admin_cant_move_to_another_realm(self) -> None:
|
|
|
|
user_profile = self.example_user("iago")
|
|
|
|
self.assertEqual(user_profile.role, UserProfile.ROLE_REALM_ADMINISTRATOR)
|
|
|
|
self.login("iago")
|
|
|
|
|
|
|
|
lear_realm = get_realm("lear")
|
|
|
|
new_stream = self.make_stream("new", lear_realm)
|
|
|
|
|
|
|
|
msg_id = self.send_stream_message(user_profile, "Verona", topic_name="test123")
|
|
|
|
|
|
|
|
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(msg_id),
|
|
|
|
{
|
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
2022-05-27 14:03:08 +02:00
|
|
|
self.assert_json_error(result, "Invalid stream ID")
|
2021-04-09 15:31:07 +02:00
|
|
|
|
|
|
|
def test_move_message_realm_admin_cant_move_to_private_stream_without_subscription(
|
|
|
|
self,
|
|
|
|
) -> None:
|
|
|
|
user_profile = self.example_user("iago")
|
|
|
|
self.assertEqual(user_profile.role, UserProfile.ROLE_REALM_ADMINISTRATOR)
|
|
|
|
self.login("iago")
|
|
|
|
|
|
|
|
new_stream = self.make_stream("new", invite_only=True)
|
|
|
|
msg_id = self.send_stream_message(user_profile, "Verona", topic_name="test123")
|
|
|
|
|
|
|
|
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(msg_id),
|
|
|
|
{
|
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
2022-05-27 14:03:08 +02:00
|
|
|
self.assert_json_error(result, "Invalid stream ID")
|
2021-04-09 15:31:07 +02:00
|
|
|
|
2021-04-10 17:04:53 +02:00
|
|
|
def test_move_message_realm_admin_cant_move_from_private_stream_without_subscription(
|
|
|
|
self,
|
|
|
|
) -> None:
|
|
|
|
user_profile = self.example_user("iago")
|
|
|
|
self.assertEqual(user_profile.role, UserProfile.ROLE_REALM_ADMINISTRATOR)
|
|
|
|
self.login("iago")
|
|
|
|
|
|
|
|
self.make_stream("privatestream", invite_only=True)
|
|
|
|
self.subscribe(user_profile, "privatestream")
|
|
|
|
msg_id = self.send_stream_message(user_profile, "privatestream", topic_name="test123")
|
|
|
|
self.unsubscribe(user_profile, "privatestream")
|
|
|
|
|
|
|
|
verona = get_stream("Verona", user_profile.realm)
|
|
|
|
|
|
|
|
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(msg_id),
|
|
|
|
{
|
|
|
|
"stream_id": verona.id,
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
|
|
|
self.assert_json_error(
|
|
|
|
result,
|
|
|
|
"You don't have permission to move this message due to missing access to its stream",
|
|
|
|
)
|
|
|
|
|
2021-05-13 16:23:26 +02:00
|
|
|
def test_move_message_from_private_stream_message_access_checks(
|
|
|
|
self,
|
|
|
|
) -> None:
|
|
|
|
hamlet = self.example_user("hamlet")
|
|
|
|
user_profile = self.example_user("iago")
|
|
|
|
self.assertEqual(user_profile.role, UserProfile.ROLE_REALM_ADMINISTRATOR)
|
|
|
|
self.login("iago")
|
|
|
|
|
|
|
|
private_stream = self.make_stream(
|
|
|
|
"privatestream", invite_only=True, history_public_to_subscribers=False
|
|
|
|
)
|
|
|
|
self.subscribe(hamlet, "privatestream")
|
|
|
|
original_msg_id = self.send_stream_message(hamlet, "privatestream", topic_name="test123")
|
|
|
|
self.subscribe(user_profile, "privatestream")
|
|
|
|
new_msg_id = self.send_stream_message(user_profile, "privatestream", topic_name="test123")
|
|
|
|
|
|
|
|
# Now we unsub and hamlet sends a new message (we won't have access to it even after re-subbing!)
|
|
|
|
self.unsubscribe(user_profile, "privatestream")
|
|
|
|
new_inaccessible_msg_id = self.send_stream_message(
|
|
|
|
hamlet, "privatestream", topic_name="test123"
|
|
|
|
)
|
|
|
|
|
|
|
|
# Re-subscribe and send another message:
|
|
|
|
self.subscribe(user_profile, "privatestream")
|
|
|
|
newest_msg_id = self.send_stream_message(
|
|
|
|
user_profile, "privatestream", topic_name="test123"
|
|
|
|
)
|
|
|
|
|
|
|
|
verona = get_stream("Verona", user_profile.realm)
|
|
|
|
|
|
|
|
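# Moving with change_all should move only the messages iago can access
# (new_msg_id and newest_msg_id), leaving the inaccessible ones behind.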
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(new_msg_id),
|
|
|
|
{
|
|
|
|
"stream_id": verona.id,
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
|
|
|
self.assert_json_success(result)
|
|
|
|
self.assertEqual(Message.objects.get(id=new_msg_id).recipient_id, verona.recipient_id)
|
|
|
|
self.assertEqual(Message.objects.get(id=newest_msg_id).recipient_id, verona.recipient_id)
|
|
|
|
# The original message and the new, inaccessible message weren't moved,
|
|
|
|
# because user_profile doesn't have access to them.
|
|
|
|
self.assertEqual(
|
|
|
|
Message.objects.get(id=original_msg_id).recipient_id, private_stream.recipient_id
|
|
|
|
)
|
|
|
|
self.assertEqual(
|
|
|
|
Message.objects.get(id=new_inaccessible_msg_id).recipient_id,
|
|
|
|
private_stream.recipient_id,
|
|
|
|
)
|
|
|
|
|
2020-07-03 11:09:37 +02:00
|
|
|
def test_move_message_to_stream_change_later(self) -> None:
|
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
|
2021-02-12 08:19:30 +01:00
|
|
|
"iago", "test move stream", "new stream", "test"
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{msg_id_later}",
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"propagate_mode": "change_later",
|
2022-07-17 13:34:04 +02:00
|
|
|
"send_notification_to_old_thread": "true",
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, old_stream, "test")
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(messages, 2)
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assertEqual(messages[0].id, msg_id)
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
|
|
|
messages[1].content,
|
2022-01-15 22:33:32 +01:00
|
|
|
f"2 messages were moved from this topic to #**new stream>test** by @_**Iago|{user_profile.id}**.",
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, new_stream, "test")
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(messages, 3)
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assertEqual(messages[0].id, msg_id_later)
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
|
|
|
messages[2].content,
|
2022-01-15 22:33:32 +01:00
|
|
|
f"2 messages were moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**.",
|
|
|
|
)
|
|
|
|
|
2022-10-22 05:32:52 +02:00
|
|
|
def test_move_message_to_preexisting_topic_change_later(self) -> None:
|
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
|
|
|
|
"iago", "test move stream", "new stream", "test"
|
|
|
|
)
|
|
|
|
|
|
|
|
self.send_stream_message(
|
|
|
|
sender=self.example_user("iago"),
|
|
|
|
stream_name="new stream",
|
|
|
|
topic_name="test",
|
|
|
|
content="Always here",
|
|
|
|
)
|
|
|
|
|
|
|
|
result = self.client_patch(
|
|
|
|
f"/json/messages/{msg_id_later}",
|
|
|
|
{
|
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"propagate_mode": "change_later",
|
|
|
|
"send_notification_to_old_thread": "true",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, old_stream, "test")
|
|
|
|
self.assert_length(messages, 2)
|
|
|
|
self.assertEqual(messages[0].id, msg_id)
|
|
|
|
self.assertEqual(
|
|
|
|
messages[1].content,
|
|
|
|
f"2 messages were moved from this topic to #**new stream>test** by @_**Iago|{user_profile.id}**.",
|
|
|
|
)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, new_stream, "test")
|
|
|
|
self.assert_length(messages, 4)
|
|
|
|
self.assertEqual(messages[0].id, msg_id_later)
|
|
|
|
self.assertEqual(
|
|
|
|
messages[3].content,
|
|
|
|
f"2 messages were moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**.",
|
|
|
|
)
|
|
|
|
|
2022-02-09 21:48:30 +01:00
|
|
|
def test_move_message_to_stream_change_later_all_moved(self) -> None:
|
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
|
|
|
|
"iago", "test move stream", "new stream", "test"
|
|
|
|
)
|
|
|
|
|
|
|
|
result = self.client_patch(
|
|
|
|
f"/json/messages/{msg_id}",
|
|
|
|
{
|
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"propagate_mode": "change_later",
|
2022-07-17 13:34:04 +02:00
|
|
|
"send_notification_to_old_thread": "true",
|
2022-02-09 21:48:30 +01:00
|
|
|
},
|
|
|
|
)
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, old_stream, "test")
|
|
|
|
self.assert_length(messages, 1)
|
|
|
|
self.assertEqual(
|
|
|
|
messages[0].content,
|
|
|
|
f"This topic was moved to #**new stream>test** by @_**Iago|{user_profile.id}**.",
|
|
|
|
)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, new_stream, "test")
|
|
|
|
self.assert_length(messages, 4)
|
|
|
|
self.assertEqual(messages[0].id, msg_id)
|
|
|
|
self.assertEqual(
|
|
|
|
messages[3].content,
|
|
|
|
f"This topic was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**.",
|
|
|
|
)
|
|
|
|
|
2022-10-22 05:32:52 +02:00
|
|
|
def test_move_message_to_preexisting_topic_change_later_all_moved(self) -> None:
|
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
|
|
|
|
"iago", "test move stream", "new stream", "test"
|
|
|
|
)
|
|
|
|
|
|
|
|
self.send_stream_message(
|
|
|
|
sender=self.example_user("iago"),
|
|
|
|
stream_name="new stream",
|
|
|
|
topic_name="test",
|
|
|
|
content="Always here",
|
|
|
|
)
|
|
|
|
|
|
|
|
result = self.client_patch(
|
|
|
|
f"/json/messages/{msg_id}",
|
|
|
|
{
|
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"propagate_mode": "change_later",
|
|
|
|
"send_notification_to_old_thread": "true",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, old_stream, "test")
|
|
|
|
self.assert_length(messages, 1)
|
|
|
|
self.assertEqual(
|
|
|
|
messages[0].content,
|
|
|
|
f"This topic was moved to #**new stream>test** by @_**Iago|{user_profile.id}**.",
|
|
|
|
)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, new_stream, "test")
|
|
|
|
self.assert_length(messages, 5)
|
|
|
|
self.assertEqual(messages[0].id, msg_id)
|
|
|
|
self.assertEqual(
|
|
|
|
messages[4].content,
|
|
|
|
f"3 messages were moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**.",
|
|
|
|
)
|
|
|
|
|
2022-01-15 22:33:32 +01:00
|
|
|
def test_move_message_to_stream_change_one(self) -> None:
|
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
|
|
|
|
"iago", "test move stream", "new stream", "test"
|
|
|
|
)
|
|
|
|
|
|
|
|
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(msg_id_later),
|
|
|
|
{
|
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"propagate_mode": "change_one",
|
2022-07-17 13:34:04 +02:00
|
|
|
"send_notification_to_old_thread": "true",
|
2022-01-15 22:33:32 +01:00
|
|
|
},
|
|
|
|
)
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, old_stream, "test")
|
|
|
|
self.assert_length(messages, 3)
|
|
|
|
self.assertEqual(messages[0].id, msg_id)
|
|
|
|
self.assertEqual(
|
|
|
|
messages[2].content,
|
|
|
|
f"A message was moved from this topic to #**new stream>test** by @_**Iago|{user_profile.id}**.",
|
|
|
|
)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, new_stream, "test")
|
2023-03-12 18:12:10 +01:00
|
|
|
message = {
|
|
|
|
"id": msg_id_later,
|
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"display_recipient": new_stream.name,
|
|
|
|
"topic": "test",
|
|
|
|
}
|
|
|
|
moved_message_link = near_stream_message_url(messages[1].realm, message)
|
2022-01-15 22:33:32 +01:00
|
|
|
self.assert_length(messages, 2)
|
|
|
|
self.assertEqual(messages[0].id, msg_id_later)
|
|
|
|
self.assertEqual(
|
|
|
|
messages[1].content,
|
2023-03-12 18:12:10 +01:00
|
|
|
f"[A message]({moved_message_link}) was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**.",
|
2022-01-15 22:33:32 +01:00
|
|
|
)
|
|
|
|
|
2022-10-22 05:32:52 +02:00
|
|
|
def test_move_message_to_preexisting_topic_change_one(self) -> None:
|
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
|
|
|
|
"iago", "test move stream", "new stream", "test"
|
|
|
|
)
|
|
|
|
|
|
|
|
self.send_stream_message(
|
|
|
|
sender=self.example_user("iago"),
|
|
|
|
stream_name="new stream",
|
|
|
|
topic_name="test",
|
|
|
|
content="Always here",
|
|
|
|
)
|
|
|
|
|
|
|
|
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(msg_id_later),
|
|
|
|
{
|
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"propagate_mode": "change_one",
|
|
|
|
"send_notification_to_old_thread": "true",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, old_stream, "test")
|
|
|
|
self.assert_length(messages, 3)
|
|
|
|
self.assertEqual(messages[0].id, msg_id)
|
|
|
|
self.assertEqual(
|
|
|
|
messages[2].content,
|
|
|
|
f"A message was moved from this topic to #**new stream>test** by @_**Iago|{user_profile.id}**.",
|
|
|
|
)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, new_stream, "test")
|
2023-03-12 18:12:10 +01:00
|
|
|
message = {
|
|
|
|
"id": msg_id_later,
|
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"display_recipient": new_stream.name,
|
|
|
|
"topic": "test",
|
|
|
|
}
|
|
|
|
moved_message_link = near_stream_message_url(messages[2].realm, message)
|
2022-10-22 05:32:52 +02:00
|
|
|
self.assert_length(messages, 3)
|
|
|
|
self.assertEqual(messages[0].id, msg_id_later)
|
|
|
|
self.assertEqual(
|
|
|
|
messages[2].content,
|
2023-03-12 18:12:10 +01:00
|
|
|
f"[A message]({moved_message_link}) was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**.",
|
2022-10-22 05:32:52 +02:00
|
|
|
)
|
|
|
|
|
2022-01-15 22:33:32 +01:00
|
|
|
def test_move_message_to_stream_change_all(self) -> None:
|
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
|
|
|
|
"iago", "test move stream", "new stream", "test"
|
|
|
|
)
|
|
|
|
|
|
|
|
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(msg_id_later),
|
|
|
|
{
|
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"propagate_mode": "change_all",
|
2022-07-17 13:34:04 +02:00
|
|
|
"send_notification_to_old_thread": "true",
|
2022-01-15 22:33:32 +01:00
|
|
|
},
|
|
|
|
)
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, old_stream, "test")
|
|
|
|
self.assert_length(messages, 1)
|
|
|
|
self.assertEqual(
|
|
|
|
messages[0].content,
|
|
|
|
f"This topic was moved to #**new stream>test** by @_**Iago|{user_profile.id}**.",
|
|
|
|
)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, new_stream, "test")
|
|
|
|
self.assert_length(messages, 4)
|
|
|
|
self.assertEqual(messages[0].id, msg_id)
|
|
|
|
self.assertEqual(
|
|
|
|
messages[3].content,
|
|
|
|
f"This topic was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**.",
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2022-10-22 05:32:52 +02:00
|
|
|
def test_move_message_to_preexisting_topic_change_all(self) -> None:
|
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
|
|
|
|
"iago", "test move stream", "new stream", "test"
|
|
|
|
)
|
|
|
|
|
|
|
|
self.send_stream_message(
|
|
|
|
sender=self.example_user("iago"),
|
|
|
|
stream_name="new stream",
|
|
|
|
topic_name="test",
|
|
|
|
content="Always here",
|
|
|
|
)
|
|
|
|
|
|
|
|
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(msg_id_later),
|
|
|
|
{
|
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
"send_notification_to_old_thread": "true",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, old_stream, "test")
|
|
|
|
self.assert_length(messages, 1)
|
|
|
|
self.assertEqual(
|
|
|
|
messages[0].content,
|
|
|
|
f"This topic was moved to #**new stream>test** by @_**Iago|{user_profile.id}**.",
|
|
|
|
)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, new_stream, "test")
|
|
|
|
self.assert_length(messages, 5)
|
|
|
|
self.assertEqual(messages[0].id, msg_id)
|
|
|
|
self.assertEqual(
|
|
|
|
messages[4].content,
|
|
|
|
f"3 messages were moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**.",
|
|
|
|
)
|
|
|
|
|
2021-04-08 21:30:40 +02:00
|
|
|
def test_move_message_between_streams_policy_setting(self) -> None:
|
2020-07-03 11:09:37 +02:00
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
|
2021-04-08 21:30:40 +02:00
|
|
|
"othello", "old_stream_1", "new_stream_1", "test"
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-04-08 21:30:40 +02:00
|
|
|
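# Helper: set the acting user's role, attempt a change_all move, and check whether
# the current move_messages_between_streams_policy permits it.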
def check_move_message_according_to_policy(role: int, expect_fail: bool = False) -> None:
|
|
|
|
do_change_user_role(user_profile, role, acting_user=None)
|
|
|
|
|
|
|
|
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(msg_id),
|
|
|
|
{
|
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
|
|
|
if expect_fail:
|
|
|
|
self.assert_json_error(result, "You don't have permission to move this message")
|
|
|
|
messages = get_topic_messages(user_profile, old_stream, "test")
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(messages, 3)
|
2021-04-08 21:30:40 +02:00
|
|
|
messages = get_topic_messages(user_profile, new_stream, "test")
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(messages, 0)
|
2021-04-08 21:30:40 +02:00
|
|
|
else:
|
|
|
|
self.assert_json_success(result)
|
|
|
|
messages = get_topic_messages(user_profile, old_stream, "test")
|
2022-07-17 13:34:04 +02:00
|
|
|
self.assert_length(messages, 0)
|
2021-04-08 21:30:40 +02:00
|
|
|
messages = get_topic_messages(user_profile, new_stream, "test")
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(messages, 4)
|
2021-04-08 21:30:40 +02:00
|
|
|
|
2022-09-29 09:03:12 +02:00
|
|
|
# Check moving messages when the policy is Realm.POLICY_NOBODY.
|
|
|
|
do_set_realm_property(
|
|
|
|
user_profile.realm,
|
|
|
|
"move_messages_between_streams_policy",
|
|
|
|
Realm.POLICY_NOBODY,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
check_move_message_according_to_policy(UserProfile.ROLE_REALM_OWNER, expect_fail=True)
|
|
|
|
check_move_message_according_to_policy(
|
|
|
|
UserProfile.ROLE_REALM_ADMINISTRATOR, expect_fail=True
|
|
|
|
)
|
|
|
|
|
2021-04-08 21:30:40 +02:00
|
|
|
# Check moving messages when the policy is Realm.POLICY_ADMINS_ONLY.
|
|
|
|
do_set_realm_property(
|
|
|
|
user_profile.realm,
|
|
|
|
"move_messages_between_streams_policy",
|
|
|
|
Realm.POLICY_ADMINS_ONLY,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
check_move_message_according_to_policy(UserProfile.ROLE_MODERATOR, expect_fail=True)
|
|
|
|
check_move_message_according_to_policy(UserProfile.ROLE_REALM_ADMINISTRATOR)
|
|
|
|
|
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
|
|
|
|
"othello", "old_stream_2", "new_stream_2", "test"
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2021-04-08 21:30:40 +02:00
|
|
|
# Check moving messages when the policy is Realm.POLICY_MODERATORS_ONLY.
|
|
|
|
do_set_realm_property(
|
|
|
|
user_profile.realm,
|
|
|
|
"move_messages_between_streams_policy",
|
|
|
|
Realm.POLICY_MODERATORS_ONLY,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
check_move_message_according_to_policy(UserProfile.ROLE_MEMBER, expect_fail=True)
|
|
|
|
check_move_message_according_to_policy(UserProfile.ROLE_MODERATOR)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-04-08 21:30:40 +02:00
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
|
|
|
|
"othello", "old_stream_3", "new_stream_3", "test"
|
|
|
|
)
|
|
|
|
# Check moving messages when the policy is Realm.POLICY_FULL_MEMBERS_ONLY.
|
|
|
|
do_set_realm_property(
|
|
|
|
user_profile.realm,
|
|
|
|
"move_messages_between_streams_policy",
|
|
|
|
Realm.POLICY_FULL_MEMBERS_ONLY,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
do_set_realm_property(
|
|
|
|
user_profile.realm, "waiting_period_threshold", 100000, acting_user=None
|
|
|
|
)
|
|
|
|
check_move_message_according_to_policy(UserProfile.ROLE_MEMBER, expect_fail=True)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-04-08 21:30:40 +02:00
|
|
|
do_set_realm_property(user_profile.realm, "waiting_period_threshold", 0, acting_user=None)
|
|
|
|
check_move_message_according_to_policy(UserProfile.ROLE_MEMBER)
|
|
|
|
|
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
|
|
|
|
"othello", "old_stream_4", "new_stream_4", "test"
|
|
|
|
)
|
|
|
|
# Check moving messages when the policy is Realm.POLICY_MEMBERS_ONLY.
|
|
|
|
do_set_realm_property(
|
|
|
|
user_profile.realm,
|
|
|
|
"move_messages_between_streams_policy",
|
|
|
|
Realm.POLICY_MEMBERS_ONLY,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
check_move_message_according_to_policy(UserProfile.ROLE_GUEST, expect_fail=True)
|
|
|
|
check_move_message_according_to_policy(UserProfile.ROLE_MEMBER)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2022-12-22 08:25:29 +01:00
|
|
|
def test_move_message_to_stream_time_limit(self) -> None:
|
|
|
|
shiva = self.example_user("shiva")
|
|
|
|
iago = self.example_user("iago")
|
|
|
|
cordelia = self.example_user("cordelia")
|
|
|
|
|
|
|
|
test_stream_1 = self.make_stream("test_stream_1")
|
|
|
|
test_stream_2 = self.make_stream("test_stream_2")
|
|
|
|
|
|
|
|
self.subscribe(shiva, test_stream_1.name)
|
|
|
|
self.subscribe(iago, test_stream_1.name)
|
|
|
|
self.subscribe(cordelia, test_stream_1.name)
|
|
|
|
self.subscribe(shiva, test_stream_2.name)
|
|
|
|
self.subscribe(iago, test_stream_2.name)
|
|
|
|
self.subscribe(cordelia, test_stream_2.name)
|
|
|
|
|
|
|
|
msg_id = self.send_stream_message(
|
|
|
|
cordelia, test_stream_1.name, topic_name="test", content="First"
|
|
|
|
)
|
|
|
|
self.send_stream_message(cordelia, test_stream_1.name, topic_name="test", content="Second")
|
|
|
|
|
|
|
|
self.send_stream_message(cordelia, test_stream_1.name, topic_name="test", content="third")
|
|
|
|
|
|
|
|
do_set_realm_property(
|
|
|
|
cordelia.realm,
|
|
|
|
"move_messages_between_streams_policy",
|
|
|
|
Realm.POLICY_MEMBERS_ONLY,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
|
|
|
|
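# Helper: log in as `user` and try to move the whole topic from old_stream to
# new_stream, asserting either the expected error or a successful move.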
def check_move_message_to_stream(
|
|
|
|
user: UserProfile,
|
|
|
|
old_stream: Stream,
|
|
|
|
new_stream: Stream,
|
|
|
|
*,
|
|
|
|
expect_error_message: Optional[str] = None,
|
|
|
|
) -> None:
|
|
|
|
self.login_user(user)
|
|
|
|
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(msg_id),
|
|
|
|
{
|
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
"send_notification_to_new_thread": orjson.dumps(False).decode(),
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
|
|
|
if expect_error_message is not None:
|
|
|
|
self.assert_json_error(result, expect_error_message)
|
|
|
|
messages = get_topic_messages(user, old_stream, "test")
|
|
|
|
self.assert_length(messages, 3)
|
|
|
|
messages = get_topic_messages(user, new_stream, "test")
|
|
|
|
self.assert_length(messages, 0)
|
|
|
|
else:
|
|
|
|
self.assert_json_success(result)
|
|
|
|
messages = get_topic_messages(user, old_stream, "test")
|
|
|
|
self.assert_length(messages, 0)
|
|
|
|
messages = get_topic_messages(user, new_stream, "test")
|
|
|
|
self.assert_length(messages, 3)
|
|
|
|
|
|
|
|
# Users who are not admins or moderators, including the message's sender,
|
|
|
|
# cannot move messages sent more than a week ago.
|
|
|
|
message = Message.objects.get(id=msg_id)
|
|
|
|
message.date_sent = message.date_sent - datetime.timedelta(seconds=604900)
|
|
|
|
message.save()
|
|
|
|
check_move_message_to_stream(
|
|
|
|
cordelia,
|
|
|
|
test_stream_1,
|
|
|
|
test_stream_2,
|
|
|
|
expect_error_message="The time limit for editing this message's stream has passed",
|
|
|
|
)
|
|
|
|
|
|
|
|
# Admins and moderators can move messages irrespective of the time limit.
|
|
|
|
check_move_message_to_stream(shiva, test_stream_1, test_stream_2, expect_error_message=None)
|
|
|
|
check_move_message_to_stream(iago, test_stream_2, test_stream_1, expect_error_message=None)
|
|
|
|
|
|
|
|
# Set the limit for moving messages between streams to two weeks.
|
|
|
|
do_set_realm_property(
|
|
|
|
cordelia.realm,
|
|
|
|
"move_messages_between_streams_limit_seconds",
|
|
|
|
604800 * 2,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
check_move_message_to_stream(
|
|
|
|
cordelia, test_stream_1, test_stream_2, expect_error_message=None
|
|
|
|
)
|
|
|
|
|
2021-05-04 21:58:01 +02:00
|
|
|
def test_move_message_to_stream_based_on_stream_post_policy(self) -> None:
|
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
|
|
|
|
"othello", "old_stream_1", "new_stream_1", "test"
|
|
|
|
)
|
|
|
|
do_set_realm_property(
|
|
|
|
user_profile.realm,
|
|
|
|
"move_messages_between_streams_policy",
|
|
|
|
Realm.POLICY_MEMBERS_ONLY,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
|
|
|
|
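# Helper: set the acting user's role and attempt the move; the target stream's
# stream_post_policy determines whether it is allowed.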
def check_move_message_to_stream(role: int, error_msg: Optional[str] = None) -> None:
|
|
|
|
do_change_user_role(user_profile, role, acting_user=None)
|
|
|
|
|
|
|
|
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(msg_id),
|
|
|
|
{
|
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
|
|
|
if error_msg is not None:
|
|
|
|
self.assert_json_error(result, error_msg)
|
|
|
|
messages = get_topic_messages(user_profile, old_stream, "test")
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(messages, 3)
|
2021-05-04 21:58:01 +02:00
|
|
|
messages = get_topic_messages(user_profile, new_stream, "test")
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(messages, 0)
|
2021-05-04 21:58:01 +02:00
|
|
|
else:
|
|
|
|
self.assert_json_success(result)
|
|
|
|
messages = get_topic_messages(user_profile, old_stream, "test")
|
2022-07-17 13:34:04 +02:00
|
|
|
self.assert_length(messages, 0)
|
2021-05-04 21:58:01 +02:00
|
|
|
messages = get_topic_messages(user_profile, new_stream, "test")
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(messages, 4)
|
2021-05-04 21:58:01 +02:00
|
|
|
|
|
|
|
# Check when stream_post_policy is STREAM_POST_POLICY_ADMINS.
|
2021-12-15 01:04:35 +01:00
|
|
|
do_change_stream_post_policy(
|
|
|
|
new_stream, Stream.STREAM_POST_POLICY_ADMINS, acting_user=user_profile
|
|
|
|
)
|
2021-05-04 21:58:01 +02:00
|
|
|
error_msg = "Only organization administrators can send to this stream."
|
|
|
|
check_move_message_to_stream(UserProfile.ROLE_MODERATOR, error_msg)
|
|
|
|
check_move_message_to_stream(UserProfile.ROLE_REALM_ADMINISTRATOR)
|
|
|
|
|
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
|
|
|
|
"othello", "old_stream_2", "new_stream_2", "test"
|
|
|
|
)
|
|
|
|
|
|
|
|
# Check when stream_post_policy is STREAM_POST_POLICY_MODERATORS.
|
2021-12-15 01:04:35 +01:00
|
|
|
do_change_stream_post_policy(
|
|
|
|
new_stream, Stream.STREAM_POST_POLICY_MODERATORS, acting_user=user_profile
|
|
|
|
)
|
2021-05-04 21:58:01 +02:00
|
|
|
error_msg = "Only organization administrators and moderators can send to this stream."
|
|
|
|
check_move_message_to_stream(UserProfile.ROLE_MEMBER, error_msg)
|
|
|
|
check_move_message_to_stream(UserProfile.ROLE_MODERATOR)
|
|
|
|
|
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
|
|
|
|
"othello", "old_stream_3", "new_stream_3", "test"
|
|
|
|
)
|
|
|
|
|
|
|
|
# Check when stream_post_policy is STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS.
|
2021-12-15 01:04:35 +01:00
|
|
|
do_change_stream_post_policy(
|
|
|
|
new_stream, Stream.STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS, acting_user=user_profile
|
|
|
|
)
|
2021-05-04 21:58:01 +02:00
|
|
|
error_msg = "New members cannot send to this stream."
|
|
|
|
|
|
|
|
do_set_realm_property(
|
|
|
|
user_profile.realm, "waiting_period_threshold", 100000, acting_user=None
|
|
|
|
)
|
|
|
|
check_move_message_to_stream(UserProfile.ROLE_MEMBER, error_msg)
|
|
|
|
|
|
|
|
do_set_realm_property(user_profile.realm, "waiting_period_threshold", 0, acting_user=None)
|
|
|
|
check_move_message_to_stream(UserProfile.ROLE_MEMBER)
|
|
|
|
|
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
|
|
|
|
"othello", "old_stream_4", "new_stream_4", "test"
|
|
|
|
)
|
|
|
|
|
|
|
|
# Check when stream_post_policy is STREAM_POST_POLICY_EVERYONE.
|
|
|
|
# In this case too, guests are not allowed, since we never allow guests to move
|
|
|
|
# messages between streams, so the stream_post_policy of the new stream does
|
|
|
|
# not matter.
|
2021-12-15 01:04:35 +01:00
|
|
|
do_change_stream_post_policy(
|
|
|
|
new_stream, Stream.STREAM_POST_POLICY_EVERYONE, acting_user=user_profile
|
|
|
|
)
|
2021-05-04 21:58:01 +02:00
|
|
|
do_set_realm_property(
|
|
|
|
user_profile.realm, "waiting_period_threshold", 100000, acting_user=None
|
|
|
|
)
|
|
|
|
check_move_message_to_stream(
|
|
|
|
UserProfile.ROLE_GUEST, "You don't have permission to move this message"
|
|
|
|
)
|
|
|
|
check_move_message_to_stream(UserProfile.ROLE_MEMBER)
|
|
|
|
|
2021-05-26 09:16:57 +02:00
|
|
|
def test_move_message_to_stream_with_topic_editing_not_allowed(self) -> None:
|
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
|
|
|
|
"othello", "old_stream_1", "new_stream_1", "test"
|
|
|
|
)
|
|
|
|
|
|
|
|
realm = user_profile.realm
|
2021-05-26 12:21:37 +02:00
|
|
|
realm.edit_topic_policy = Realm.POLICY_ADMINS_ONLY
|
2021-05-26 09:16:57 +02:00
|
|
|
realm.save()
|
|
|
|
self.login("cordelia")
|
|
|
|
|
|
|
|
do_set_realm_property(
|
|
|
|
user_profile.realm,
|
|
|
|
"move_messages_between_streams_policy",
|
|
|
|
Realm.POLICY_MEMBERS_ONLY,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
|
|
|
|
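# With edit_topic_policy restricted to admins, a member may not change the topic
# as part of the move.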
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(msg_id),
|
|
|
|
{
|
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
"topic": "new topic",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
self.assert_json_error(result, "You don't have permission to edit this message")
|
|
|
|
|
|
|
|
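# Moving to the new stream without renaming the topic is still allowed.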
result = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{msg_id}",
|
2021-05-26 09:16:57 +02:00
|
|
|
{
|
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
self.assert_json_success(result)
|
|
|
|
messages = get_topic_messages(user_profile, old_stream, "test")
|
2022-07-17 13:34:04 +02:00
|
|
|
self.assert_length(messages, 0)
|
2021-05-26 09:16:57 +02:00
|
|
|
messages = get_topic_messages(user_profile, new_stream, "test")
|
|
|
|
self.assert_length(messages, 4)
|
|
|
|
|
2020-07-03 11:09:37 +02:00
|
|
|
def test_move_message_to_stream_and_topic(self) -> None:
|
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
|
2021-02-12 08:19:30 +01:00
|
|
|
"iago", "test move stream", "new stream", "test"
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2023-04-09 20:58:00 +02:00
|
|
|
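# The query and cache-trip counts below act as a performance regression check for
# the stream-and-topic move code path.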
with self.assert_database_query_count(57), cache_tries_captured() as cache_tries:
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{msg_id}",
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"propagate_mode": "change_all",
|
2022-07-17 13:34:04 +02:00
|
|
|
"send_notification_to_old_thread": "true",
|
|
|
|
"stream_id": new_stream.id,
|
2021-02-12 08:20:45 +01:00
|
|
|
"topic": "new topic",
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(cache_tries, 13)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, old_stream, "test")
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(messages, 1)
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
|
|
|
messages[0].content,
|
2022-01-15 22:33:32 +01:00
|
|
|
f"This topic was moved to #**new stream>new topic** by @_**Iago|{user_profile.id}**.",
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, new_stream, "new topic")
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(messages, 4)
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
|
|
|
messages[3].content,
|
2022-01-15 22:33:32 +01:00
|
|
|
f"This topic was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**.",
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
def test_inaccessible_msg_after_stream_change(self) -> None:
|
|
|
|
"""Simulates the case where message is moved to a stream where user is not a subscribed"""
|
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_lt) = self.prepare_move_topics(
|
2021-02-12 08:19:30 +01:00
|
|
|
"iago", "test move stream", "new stream", "test"
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
guest_user = self.example_user("polonius")
|
|
|
|
non_guest_user = self.example_user("hamlet")
|
2020-07-03 11:09:37 +02:00
|
|
|
self.subscribe(guest_user, old_stream.name)
|
|
|
|
self.subscribe(non_guest_user, old_stream.name)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
msg_id_to_test_access = self.send_stream_message(
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile, old_stream.name, topic_name="test", content="fourth"
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
2021-05-12 23:07:07 +02:00
|
|
|
has_message_access(
|
|
|
|
guest_user, Message.objects.get(id=msg_id_to_test_access), has_user_message=False
|
|
|
|
),
|
2021-02-12 08:19:30 +01:00
|
|
|
True,
|
|
|
|
)
|
2021-05-12 00:31:03 +02:00
|
|
|
self.assertEqual(
|
|
|
|
has_message_access(
|
2021-05-12 23:07:07 +02:00
|
|
|
guest_user,
|
|
|
|
Message.objects.get(id=msg_id_to_test_access),
|
|
|
|
has_user_message=False,
|
|
|
|
stream=old_stream,
|
2021-05-12 00:31:03 +02:00
|
|
|
),
|
|
|
|
True,
|
|
|
|
)
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
2021-05-12 23:07:07 +02:00
|
|
|
has_message_access(
|
|
|
|
non_guest_user,
|
|
|
|
Message.objects.get(id=msg_id_to_test_access),
|
|
|
|
has_user_message=False,
|
|
|
|
),
|
2021-02-12 08:19:30 +01:00
|
|
|
True,
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{msg_id}",
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
"topic": "new topic",
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_success(result)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
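# After the move, the guest user (not subscribed to the new stream) loses access
# to the message, while the non-guest user retains it.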
self.assertEqual(
|
2021-05-12 23:07:07 +02:00
|
|
|
has_message_access(
|
|
|
|
guest_user,
|
|
|
|
Message.objects.get(id=msg_id_to_test_access),
|
|
|
|
has_user_message=False,
|
|
|
|
),
|
2021-02-12 08:19:30 +01:00
|
|
|
False,
|
|
|
|
)
|
|
|
|
self.assertEqual(
|
2021-05-12 23:07:07 +02:00
|
|
|
has_message_access(
|
|
|
|
non_guest_user,
|
|
|
|
Message.objects.get(id=msg_id_to_test_access),
|
|
|
|
has_user_message=False,
|
|
|
|
),
|
2021-02-12 08:19:30 +01:00
|
|
|
True,
|
|
|
|
)
|
2021-05-12 00:31:03 +02:00
|
|
|
self.assertEqual(
|
|
|
|
# If the guest user were subscribed to the new stream,
|
|
|
|
# they'd have access; has_message_access does not validate
|
|
|
|
# the is_subscribed parameter.
|
|
|
|
has_message_access(
|
|
|
|
guest_user,
|
|
|
|
Message.objects.get(id=msg_id_to_test_access),
|
2021-05-12 23:07:07 +02:00
|
|
|
has_user_message=False,
|
2021-05-12 00:31:03 +02:00
|
|
|
stream=new_stream,
|
|
|
|
is_subscribed=True,
|
|
|
|
),
|
|
|
|
True,
|
|
|
|
)
|
|
|
|
|
|
|
|
self.assertEqual(
|
|
|
|
has_message_access(
|
2021-05-12 23:07:07 +02:00
|
|
|
guest_user,
|
|
|
|
Message.objects.get(id=msg_id_to_test_access),
|
|
|
|
has_user_message=False,
|
|
|
|
stream=new_stream,
|
2021-05-12 00:31:03 +02:00
|
|
|
),
|
|
|
|
False,
|
|
|
|
)
|
|
|
|
with self.assertRaises(AssertionError):
|
|
|
|
# Raises an AssertionError if the passed stream does not match the message's stream.
|
|
|
|
has_message_access(
|
2021-05-12 23:07:07 +02:00
|
|
|
guest_user,
|
|
|
|
Message.objects.get(id=msg_id_to_test_access),
|
|
|
|
has_user_message=False,
|
|
|
|
stream=old_stream,
|
2021-05-12 00:31:03 +02:00
|
|
|
)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
|
|
|
UserMessage.objects.filter(
|
|
|
|
user_profile_id=non_guest_user.id,
|
|
|
|
message_id=msg_id_to_test_access,
|
|
|
|
).count(),
|
|
|
|
0,
|
|
|
|
)
|
|
|
|
self.assertEqual(
|
|
|
|
has_message_access(
|
2021-05-12 23:07:07 +02:00
|
|
|
self.example_user("iago"),
|
|
|
|
Message.objects.get(id=msg_id_to_test_access),
|
|
|
|
has_user_message=False,
|
2021-02-12 08:19:30 +01:00
|
|
|
),
|
|
|
|
True,
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
def test_no_notify_move_message_to_stream(self) -> None:
|
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_lt) = self.prepare_move_topics(
|
2021-02-12 08:19:30 +01:00
|
|
|
"iago", "test move stream", "new stream", "test"
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{msg_id}",
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
"send_notification_to_old_thread": "false",
|
|
|
|
"send_notification_to_new_thread": "false",
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, old_stream, "test")
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(messages, 0)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, new_stream, "test")
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(messages, 3)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
def test_notify_new_thread_move_message_to_stream(self) -> None:
|
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_lt) = self.prepare_move_topics(
|
2021-02-12 08:19:30 +01:00
|
|
|
"iago", "test move stream", "new stream", "test"
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{msg_id}",
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
"send_notification_to_old_thread": "false",
|
|
|
|
"send_notification_to_new_thread": "true",
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, old_stream, "test")
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(messages, 0)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, new_stream, "test")
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(messages, 4)
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
|
|
|
messages[3].content,
|
2022-01-15 22:33:32 +01:00
|
|
|
f"This topic was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**.",
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
def test_notify_old_thread_move_message_to_stream(self) -> None:
|
|
|
|
(user_profile, old_stream, new_stream, msg_id, msg_id_lt) = self.prepare_move_topics(
|
2021-02-12 08:19:30 +01:00
|
|
|
"iago", "test move stream", "new stream", "test"
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_patch(
|
2021-02-03 14:07:36 +01:00
|
|
|
f"/json/messages/{msg_id}",
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"stream_id": new_stream.id,
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
"send_notification_to_old_thread": "true",
|
|
|
|
"send_notification_to_new_thread": "false",
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, old_stream, "test")
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(messages, 1)
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
|
|
|
messages[0].content,
|
2022-01-15 22:33:32 +01:00
|
|
|
f"This topic was moved to #**new stream>test** by @_**Iago|{user_profile.id}**.",
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, new_stream, "test")
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(messages, 3)
|
2020-07-03 11:09:37 +02:00
|
|
|
|
2022-07-17 13:34:04 +02:00
|
|
|
def test_notify_new_topic(self) -> None:
|
|
|
|
user_profile = self.example_user("iago")
|
|
|
|
self.login("iago")
|
|
|
|
stream = self.make_stream("public stream")
|
|
|
|
self.subscribe(user_profile, stream.name)
|
|
|
|
msg_id = self.send_stream_message(
|
|
|
|
user_profile, stream.name, topic_name="test", content="First"
|
|
|
|
)
|
|
|
|
self.send_stream_message(user_profile, stream.name, topic_name="test", content="Second")
|
|
|
|
self.send_stream_message(user_profile, stream.name, topic_name="test", content="third")
|
|
|
|
|
|
|
|
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(msg_id),
|
|
|
|
{
|
|
|
|
"topic": "edited",
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
"send_notification_to_old_thread": "false",
|
|
|
|
"send_notification_to_new_thread": "true",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, stream, "test")
|
|
|
|
self.assert_length(messages, 0)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, stream, "edited")
|
|
|
|
self.assert_length(messages, 4)
|
|
|
|
self.assertEqual(
|
|
|
|
messages[3].content,
|
|
|
|
f"This topic was moved here from #**public stream>test** by @_**Iago|{user_profile.id}**.",
|
|
|
|
)
|
|
|
|
|
|
|
|
def test_notify_old_topic(self) -> None:
|
|
|
|
user_profile = self.example_user("iago")
|
|
|
|
self.login("iago")
|
|
|
|
stream = self.make_stream("public stream")
|
|
|
|
self.subscribe(user_profile, stream.name)
|
|
|
|
msg_id = self.send_stream_message(
|
|
|
|
user_profile, stream.name, topic_name="test", content="First"
|
|
|
|
)
|
|
|
|
self.send_stream_message(user_profile, stream.name, topic_name="test", content="Second")
|
|
|
|
self.send_stream_message(user_profile, stream.name, topic_name="test", content="third")
|
|
|
|
|
|
|
|
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(msg_id),
|
|
|
|
{
|
|
|
|
"topic": "edited",
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
"send_notification_to_old_thread": "true",
|
|
|
|
"send_notification_to_new_thread": "false",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, stream, "test")
|
|
|
|
self.assert_length(messages, 1)
|
|
|
|
self.assertEqual(
|
|
|
|
messages[0].content,
|
|
|
|
f"This topic was moved to #**public stream>edited** by @_**Iago|{user_profile.id}**.",
|
|
|
|
)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, stream, "edited")
|
|
|
|
self.assert_length(messages, 3)
|
|
|
|
|
|
|
|
def test_notify_both_topics(self) -> None:
|
|
|
|
user_profile = self.example_user("iago")
|
|
|
|
self.login("iago")
|
|
|
|
stream = self.make_stream("public stream")
|
|
|
|
self.subscribe(user_profile, stream.name)
|
|
|
|
msg_id = self.send_stream_message(
|
|
|
|
user_profile, stream.name, topic_name="test", content="First"
|
|
|
|
)
|
|
|
|
self.send_stream_message(user_profile, stream.name, topic_name="test", content="Second")
|
|
|
|
self.send_stream_message(user_profile, stream.name, topic_name="test", content="third")
|
|
|
|
|
|
|
|
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(msg_id),
|
|
|
|
{
|
|
|
|
"topic": "edited",
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
"send_notification_to_old_thread": "true",
|
|
|
|
"send_notification_to_new_thread": "true",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, stream, "test")
|
|
|
|
self.assert_length(messages, 1)
|
|
|
|
self.assertEqual(
|
|
|
|
messages[0].content,
|
|
|
|
f"This topic was moved to #**public stream>edited** by @_**Iago|{user_profile.id}**.",
|
|
|
|
)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, stream, "edited")
|
|
|
|
self.assert_length(messages, 4)
|
|
|
|
self.assertEqual(
|
|
|
|
messages[3].content,
|
|
|
|
f"This topic was moved here from #**public stream>test** by @_**Iago|{user_profile.id}**.",
|
|
|
|
)
|
|
|
|
|
|
|
|
def test_notify_no_topic(self) -> None:
|
|
|
|
user_profile = self.example_user("iago")
|
|
|
|
self.login("iago")
|
|
|
|
stream = self.make_stream("public stream")
|
|
|
|
self.subscribe(user_profile, stream.name)
|
|
|
|
msg_id = self.send_stream_message(
|
|
|
|
user_profile, stream.name, topic_name="test", content="First"
|
|
|
|
)
|
|
|
|
self.send_stream_message(user_profile, stream.name, topic_name="test", content="Second")
|
|
|
|
self.send_stream_message(user_profile, stream.name, topic_name="test", content="third")
|
|
|
|
|
|
|
|
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(msg_id),
|
|
|
|
{
|
|
|
|
"topic": "edited",
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
"send_notification_to_old_thread": "false",
|
|
|
|
"send_notification_to_new_thread": "false",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, stream, "test")
|
|
|
|
self.assert_length(messages, 0)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, stream, "edited")
|
|
|
|
self.assert_length(messages, 3)
|
|
|
|
|
|
|
|
def test_notify_new_topics_after_message_move(self) -> None:
|
|
|
|
user_profile = self.example_user("iago")
|
|
|
|
self.login("iago")
|
|
|
|
stream = self.make_stream("public stream")
|
|
|
|
self.subscribe(user_profile, stream.name)
|
|
|
|
msg_id = self.send_stream_message(
|
|
|
|
user_profile, stream.name, topic_name="test", content="First"
|
|
|
|
)
|
|
|
|
self.send_stream_message(user_profile, stream.name, topic_name="test", content="Second")
|
|
|
|
self.send_stream_message(user_profile, stream.name, topic_name="test", content="Third")
|
|
|
|
|
|
|
|
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(msg_id),
|
|
|
|
{
|
|
|
|
"topic": "edited",
|
|
|
|
"propagate_mode": "change_one",
|
|
|
|
"send_notification_to_old_thread": "false",
|
|
|
|
"send_notification_to_new_thread": "true",
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, stream, "test")
|
|
|
|
self.assert_length(messages, 2)
|
|
|
|
self.assertEqual(messages[0].content, "Second")
|
|
|
|
self.assertEqual(messages[1].content, "Third")
|
|
|
|
|
|
|
|
messages = get_topic_messages(user_profile, stream, "edited")
|
2023-03-12 18:12:10 +01:00
|
|
|
message = {
|
|
|
|
"id": msg_id,
|
|
|
|
"stream_id": stream.id,
|
|
|
|
"display_recipient": stream.name,
|
|
|
|
"topic": "edited",
|
|
|
|
}
|
|
|
|
moved_message_link = near_stream_message_url(messages[1].realm, message)
|
2022-07-17 13:34:04 +02:00
|
|
|
self.assert_length(messages, 2)
|
|
|
|
self.assertEqual(messages[0].content, "First")
|
|
|
|
self.assertEqual(
|
|
|
|
messages[1].content,
|
2023-03-12 18:12:10 +01:00
|
|
|
f"[A message]({moved_message_link}) was moved here from #**public stream>test** by @_**Iago|{user_profile.id}**.",
|
2022-07-17 13:34:04 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
    def test_notify_old_topics_after_message_move(self) -> None:
        user_profile = self.example_user("iago")
        self.login("iago")
        stream = self.make_stream("public stream")
        self.subscribe(user_profile, stream.name)
        msg_id = self.send_stream_message(
            user_profile, stream.name, topic_name="test", content="First"
        )
        self.send_stream_message(user_profile, stream.name, topic_name="test", content="Second")
        self.send_stream_message(user_profile, stream.name, topic_name="test", content="Third")

        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "topic": "edited",
                "propagate_mode": "change_one",
                "send_notification_to_old_thread": "true",
                "send_notification_to_new_thread": "false",
            },
        )

        self.assert_json_success(result)

        messages = get_topic_messages(user_profile, stream, "test")
        self.assert_length(messages, 3)
        self.assertEqual(messages[0].content, "Second")
        self.assertEqual(messages[1].content, "Third")
        self.assertEqual(
            messages[2].content,
            f"A message was moved from this topic to #**public stream>edited** by @_**Iago|{user_profile.id}**.",
        )

        messages = get_topic_messages(user_profile, stream, "edited")
        self.assert_length(messages, 1)
        self.assertEqual(messages[0].content, "First")

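    # With both notification settings enabled, moving a single message should post
    # breadcrumb messages in both the old topic and the destination topic.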
    def test_notify_both_topics_after_message_move(self) -> None:
        user_profile = self.example_user("iago")
        self.login("iago")
        stream = self.make_stream("public stream")
        self.subscribe(user_profile, stream.name)
        msg_id = self.send_stream_message(
            user_profile, stream.name, topic_name="test", content="First"
        )
        self.send_stream_message(user_profile, stream.name, topic_name="test", content="Second")
        self.send_stream_message(user_profile, stream.name, topic_name="test", content="Third")

        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "topic": "edited",
                "propagate_mode": "change_one",
                "send_notification_to_old_thread": "true",
                "send_notification_to_new_thread": "true",
            },
        )

        self.assert_json_success(result)

        messages = get_topic_messages(user_profile, stream, "test")
        self.assert_length(messages, 3)
        self.assertEqual(messages[0].content, "Second")
        self.assertEqual(messages[1].content, "Third")
        self.assertEqual(
            messages[2].content,
            f"A message was moved from this topic to #**public stream>edited** by @_**Iago|{user_profile.id}**.",
        )

        messages = get_topic_messages(user_profile, stream, "edited")
        message = {
            "id": msg_id,
            "stream_id": stream.id,
            "display_recipient": stream.name,
            "topic": "edited",
        }
        moved_message_link = near_stream_message_url(messages[0].realm, message)
        self.assert_length(messages, 2)
        self.assertEqual(messages[0].content, "First")
        self.assertEqual(
            messages[1].content,
            f"[A message]({moved_message_link}) was moved here from #**public stream>test** by @_**Iago|{user_profile.id}**.",
        )

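    # With both notification settings disabled, moving a single message should add
    # no breadcrumb message to either topic.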
    def test_notify_no_topic_after_message_move(self) -> None:
        user_profile = self.example_user("iago")
        self.login("iago")
        stream = self.make_stream("public stream")
        self.subscribe(user_profile, stream.name)
        msg_id = self.send_stream_message(
            user_profile, stream.name, topic_name="test", content="First"
        )
        self.send_stream_message(user_profile, stream.name, topic_name="test", content="Second")
        self.send_stream_message(user_profile, stream.name, topic_name="test", content="Third")

        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "topic": "edited",
                "propagate_mode": "change_one",
                "send_notification_to_old_thread": "false",
                "send_notification_to_new_thread": "false",
            },
        )

        self.assert_json_success(result)

        messages = get_topic_messages(user_profile, stream, "test")
        self.assert_length(messages, 2)
        self.assertEqual(messages[0].content, "Second")
        self.assertEqual(messages[1].content, "Third")

        messages = get_topic_messages(user_profile, stream, "edited")
        self.assert_length(messages, 1)
        self.assertEqual(messages[0].content, "First")

    def test_notify_resolve_topic_long_name(self) -> None:
        user_profile = self.example_user("hamlet")
        self.login("hamlet")
        stream = self.make_stream("public stream")
        self.subscribe(user_profile, stream.name)

        # Marking topics with a long name as resolved causes the new topic name to be truncated.
        # We want to avoid having code paths believing that the topic is "moved" instead of
        # "resolved" in this edge case.
        topic_name = "a" * MAX_TOPIC_NAME_LENGTH
        msg_id = self.send_stream_message(
            user_profile, stream.name, topic_name=topic_name, content="First"
        )

        resolved_topic = RESOLVED_TOPIC_PREFIX + topic_name
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "topic": resolved_topic,
                "propagate_mode": "change_all",
            },
        )
        self.assert_json_success(result)

        new_topic_name = truncate_topic(resolved_topic)
        messages = get_topic_messages(user_profile, stream, new_topic_name)
        self.assert_length(messages, 2)
        self.assertEqual(messages[0].content, "First")
        self.assertEqual(
            messages[1].content,
            f"@_**{user_profile.full_name}|{user_profile.id}** has marked this topic as resolved.",
        )

        # Note that we are removing the prefix from the already truncated topic,
        # so unresolved_topic_name will not be the same as the original topic_name
        unresolved_topic_name = new_topic_name.replace(RESOLVED_TOPIC_PREFIX, "")
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "topic": unresolved_topic_name,
                "propagate_mode": "change_all",
            },
        )
        self.assert_json_success(result)

        messages = get_topic_messages(user_profile, stream, unresolved_topic_name)
        self.assert_length(messages, 3)
        self.assertEqual(
            messages[2].content,
            f"@_**{user_profile.full_name}|{user_profile.id}** has marked this topic as unresolved.",
        )

    def test_notify_resolve_and_move_topic(self) -> None:
        user_profile = self.example_user("hamlet")
        self.login("hamlet")
        stream = self.make_stream("public stream")
        topic = "test"
        self.subscribe(user_profile, stream.name)

        # Resolve a topic normally first
        msg_id = self.send_stream_message(user_profile, stream.name, "foo", topic_name=topic)
        resolved_topic = RESOLVED_TOPIC_PREFIX + topic
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "topic": resolved_topic,
                "propagate_mode": "change_all",
            },
        )
        self.assert_json_success(result)

        messages = get_topic_messages(user_profile, stream, resolved_topic)
        self.assert_length(messages, 2)
        self.assertEqual(
            messages[1].content,
            f"@_**{user_profile.full_name}|{user_profile.id}** has marked this topic as resolved.",
        )

        # Test unresolving a topic while moving it (✔ test -> bar)
        new_topic = "bar"
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "topic": new_topic,
                "propagate_mode": "change_all",
            },
        )
        self.assert_json_success(result)
        messages = get_topic_messages(user_profile, stream, new_topic)
        self.assert_length(messages, 4)
        self.assertEqual(
            messages[2].content,
            f"@_**{user_profile.full_name}|{user_profile.id}** has marked this topic as unresolved.",
        )
        self.assertEqual(
            messages[3].content,
            f"This topic was moved here from #**public stream>✔ test** by @_**{user_profile.full_name}|{user_profile.id}**.",
        )

        # Now test moving the topic while also resolving it (bar -> ✔ baz)
        new_resolved_topic = RESOLVED_TOPIC_PREFIX + "baz"
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "topic": new_resolved_topic,
                "propagate_mode": "change_all",
            },
        )
        self.assert_json_success(result)
        messages = get_topic_messages(user_profile, stream, new_resolved_topic)
        self.assert_length(messages, 6)
        self.assertEqual(
            messages[4].content,
            f"@_**{user_profile.full_name}|{user_profile.id}** has marked this topic as resolved.",
        )
        self.assertEqual(
            messages[5].content,
            f"This topic was moved here from #**public stream>{new_topic}** by @_**{user_profile.full_name}|{user_profile.id}**.",
        )

    def test_notify_resolve_topic_and_move_stream(self) -> None:
        (
            user_profile,
            first_stream,
            second_stream,
            msg_id,
            msg_id_later,
        ) = self.prepare_move_topics("iago", "first stream", "second stream", "test")

        # 'prepare_move_topics' sends 3 messages in the first_stream
        messages = get_topic_messages(user_profile, first_stream, "test")
        self.assert_length(messages, 3)

        # Test resolving a topic (test -> ✔ test) while changing stream (first_stream -> second_stream)
        new_topic = "✔ test"
        new_stream = second_stream
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "stream_id": new_stream.id,
                "topic": new_topic,
                "propagate_mode": "change_all",
            },
        )
        self.assert_json_success(result)
        messages = get_topic_messages(user_profile, new_stream, new_topic)
        self.assert_length(messages, 5)
        self.assertEqual(
            messages[3].content,
            f"@_**{user_profile.full_name}|{user_profile.id}** has marked this topic as resolved.",
        )
        self.assertEqual(
            messages[4].content,
            f"This topic was moved here from #**{first_stream.name}>test** by @_**{user_profile.full_name}|{user_profile.id}**.",
        )

        # Test unresolving a topic (✔ test -> test) while changing stream (second_stream -> first_stream)
        new_topic = "test"
        new_stream = first_stream
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "stream_id": new_stream.id,
                "topic": new_topic,
                "propagate_mode": "change_all",
            },
        )
        self.assert_json_success(result)
        messages = get_topic_messages(user_profile, new_stream, new_topic)
        self.assert_length(messages, 7)
        self.assertEqual(
            messages[5].content,
            f"@_**{user_profile.full_name}|{user_profile.id}** has marked this topic as unresolved.",
        )
        self.assertEqual(
            messages[6].content,
            f"This topic was moved here from #**{second_stream.name}>✔ test** by @_**{user_profile.full_name}|{user_profile.id}**.",
        )

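    # Shared helper for the test_move_message_from_* tests below: moves a topic
    # between streams with the given privacy/history settings and checks both the
    # resulting message history and whether UserMessage rows are created for the
    # user gaining access to the moved messages.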
    def parameterized_test_move_message_involving_private_stream(
        self,
        from_invite_only: bool,
        history_public_to_subscribers: bool,
        user_messages_created: bool,
        to_invite_only: bool = True,
    ) -> None:
        admin_user = self.example_user("iago")
        user_losing_access = self.example_user("cordelia")
        user_gaining_access = self.example_user("hamlet")

        self.login("iago")
        old_stream = self.make_stream("test move stream", invite_only=from_invite_only)
        new_stream = self.make_stream(
            "new stream",
            invite_only=to_invite_only,
            history_public_to_subscribers=history_public_to_subscribers,
        )

        self.subscribe(admin_user, old_stream.name)
        self.subscribe(user_losing_access, old_stream.name)

        self.subscribe(admin_user, new_stream.name)
        self.subscribe(user_gaining_access, new_stream.name)

        msg_id = self.send_stream_message(
            admin_user, old_stream.name, topic_name="test", content="First"
        )
        self.send_stream_message(admin_user, old_stream.name, topic_name="test", content="Second")

        self.assertEqual(
            UserMessage.objects.filter(
                user_profile_id=user_losing_access.id,
                message_id=msg_id,
            ).count(),
            1,
        )
        self.assertEqual(
            UserMessage.objects.filter(
                user_profile_id=user_gaining_access.id,
                message_id=msg_id,
            ).count(),
            0,
        )

        result = self.client_patch(
            f"/json/messages/{msg_id}",
            {
                "stream_id": new_stream.id,
                "propagate_mode": "change_all",
            },
        )
        self.assert_json_success(result)

        messages = get_topic_messages(admin_user, old_stream, "test")
        self.assert_length(messages, 0)

        messages = get_topic_messages(admin_user, new_stream, "test")
        self.assert_length(messages, 3)

        self.assertEqual(
            UserMessage.objects.filter(
                user_profile_id=user_losing_access.id,
                message_id=msg_id,
            ).count(),
            0,
        )
        # When the history is shared, UserMessage is not created for the user but the user
        # can see the message.
        self.assertEqual(
            UserMessage.objects.filter(
                user_profile_id=user_gaining_access.id,
                message_id=msg_id,
            ).count(),
            1 if user_messages_created else 0,
        )

    def test_move_message_from_public_to_private_stream_not_shared_history(self) -> None:
        self.parameterized_test_move_message_involving_private_stream(
            from_invite_only=False,
            history_public_to_subscribers=False,
            user_messages_created=True,
        )

    def test_move_message_from_public_to_private_stream_shared_history(self) -> None:
        self.parameterized_test_move_message_involving_private_stream(
            from_invite_only=False,
            history_public_to_subscribers=True,
            user_messages_created=False,
        )

    def test_move_message_from_private_to_private_stream_not_shared_history(self) -> None:
        self.parameterized_test_move_message_involving_private_stream(
            from_invite_only=True,
            history_public_to_subscribers=False,
            user_messages_created=True,
        )

    def test_move_message_from_private_to_private_stream_shared_history(self) -> None:
        self.parameterized_test_move_message_involving_private_stream(
            from_invite_only=True,
            history_public_to_subscribers=True,
            user_messages_created=False,
        )

    def test_move_message_from_private_to_public(self) -> None:
        self.parameterized_test_move_message_involving_private_stream(
            from_invite_only=True,
            history_public_to_subscribers=True,
            user_messages_created=False,
            to_invite_only=False,
        )

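    # Exercises each value of the move_messages_between_streams_policy realm
    # setting via the shared check_has_permission_policies helper.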
    def test_can_move_messages_between_streams(self) -> None:
        def validation_func(user_profile: UserProfile) -> bool:
            user_profile.refresh_from_db()
            return user_profile.can_move_messages_between_streams()

        self.check_has_permission_policies("move_messages_between_streams_policy", validation_func)

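    # End-to-end test for resolving and unresolving a topic: checks the automated
    # notification messages, that they use the realm's default language rather than
    # the acting user's, and that they are left unread only for topic participants.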
    def test_mark_topic_as_resolved(self) -> None:
        self.login("iago")
        admin_user = self.example_user("iago")
        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        aaron = self.example_user("aaron")

        # Set the user's translation language to German to test that
        # it is overridden by the realm's default language.
        admin_user.default_language = "de"
        admin_user.save()
        stream = self.make_stream("new")
        self.subscribe(admin_user, stream.name)
        self.subscribe(hamlet, stream.name)
        self.subscribe(cordelia, stream.name)
        self.subscribe(aaron, stream.name)

        original_topic = "topic 1"
        id1 = self.send_stream_message(hamlet, "new", topic_name=original_topic)
        id2 = self.send_stream_message(admin_user, "new", topic_name=original_topic)

        msg1 = Message.objects.get(id=id1)
        do_add_reaction(aaron, msg1, "tada", "1f389", "unicode_emoji")

        # Check that we don't incorrectly send "unresolve topic"
        # notifications when asking to preserve the current topic.
        result = self.client_patch(
            "/json/messages/" + str(id1),
            {
                "topic": original_topic,
                "propagate_mode": "change_all",
            },
        )
        self.assert_json_error(result, "Nothing to change")

        resolved_topic = RESOLVED_TOPIC_PREFIX + original_topic
        result = self.resolve_topic_containing_message(
            admin_user,
            id1,
            HTTP_ACCEPT_LANGUAGE="de",
        )

        self.assert_json_success(result)
        for msg_id in [id1, id2]:
            msg = Message.objects.get(id=msg_id)
            self.assertEqual(
                resolved_topic,
                msg.topic_name(),
            )

        messages = get_topic_messages(admin_user, stream, resolved_topic)
        self.assert_length(messages, 3)
        self.assertEqual(
            messages[2].content,
            f"@_**Iago|{admin_user.id}** has marked this topic as resolved.",
        )

        # Check topic resolved notification message is only unread for participants.
        assert (
            UserMessage.objects.filter(
                user_profile__in=[admin_user, hamlet, aaron], message__id=messages[2].id
            )
            .extra(where=[UserMessage.where_unread()])
            .count()
            == 3
        )

        assert (
            UserMessage.objects.filter(user_profile=cordelia, message__id=messages[2].id)
            .extra(where=[UserMessage.where_unread()])
            .count()
            == 0
        )

        # Now move to a weird state and confirm we get the normal topic moved message.
        weird_topic = "✔ ✔✔" + original_topic
        result = self.client_patch(
            "/json/messages/" + str(id1),
            {
                "topic": weird_topic,
                "propagate_mode": "change_all",
            },
        )

        self.assert_json_success(result)
        for msg_id in [id1, id2]:
            msg = Message.objects.get(id=msg_id)
            self.assertEqual(
                weird_topic,
                msg.topic_name(),
            )

        messages = get_topic_messages(admin_user, stream, weird_topic)
        self.assert_length(messages, 4)
        self.assertEqual(
            messages[2].content,
            f"@_**Iago|{admin_user.id}** has marked this topic as resolved.",
        )
        self.assertEqual(
            messages[3].content,
            f"This topic was moved here from #**new>✔ topic 1** by @_**Iago|{admin_user.id}**.",
        )

        unresolved_topic = original_topic
        result = self.client_patch(
            "/json/messages/" + str(id1),
            {
                "topic": unresolved_topic,
                "propagate_mode": "change_all",
            },
        )

        self.assert_json_success(result)
        for msg_id in [id1, id2]:
            msg = Message.objects.get(id=msg_id)
            self.assertEqual(
                unresolved_topic,
                msg.topic_name(),
            )

        messages = get_topic_messages(admin_user, stream, unresolved_topic)
        self.assert_length(messages, 5)
        self.assertEqual(
            messages[2].content, f"@_**Iago|{admin_user.id}** has marked this topic as resolved."
        )
        self.assertEqual(
            messages[4].content,
            f"@_**Iago|{admin_user.id}** has marked this topic as unresolved.",
        )

        # Check topic unresolved notification message is only unread for participants.
        assert (
            UserMessage.objects.filter(
                user_profile__in=[admin_user, hamlet, aaron], message__id=messages[4].id
            )
            .extra(where=[UserMessage.where_unread()])
            .count()
            == 3
        )

        assert (
            UserMessage.objects.filter(user_profile=cordelia, message__id=messages[4].id)
            .extra(where=[UserMessage.where_unread()])
            .count()
            == 0
        )

class DeleteMessageTest(ZulipTestCase):
    def test_delete_message_invalid_request_format(self) -> None:
        self.login("iago")
        hamlet = self.example_user("hamlet")
        msg_id = self.send_stream_message(hamlet, "Denmark")
        result = self.client_delete(f"/json/messages/{msg_id + 1}", {"message_id": msg_id})
        self.assert_json_error(result, "Invalid message(s)")
        result = self.client_delete(f"/json/messages/{msg_id}")
        self.assert_json_success(result)

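    # Walks through the delete_own_message_policy and
    # message_content_delete_limit_seconds realm settings using small helpers that
    # attempt deletion as the admin (iago), the message owner (hamlet), and an
    # unrelated user (cordelia).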
    def test_delete_message_by_user(self) -> None:
        def set_message_deleting_params(
            delete_own_message_policy: int, message_content_delete_limit_seconds: Union[int, str]
        ) -> None:
            self.login("iago")
            result = self.client_patch(
                "/json/realm",
                {
                    "delete_own_message_policy": delete_own_message_policy,
                    "message_content_delete_limit_seconds": orjson.dumps(
                        message_content_delete_limit_seconds
                    ).decode(),
                },
            )
            self.assert_json_success(result)

        def test_delete_message_by_admin(msg_id: int) -> "TestHttpResponse":
            self.login("iago")
            result = self.client_delete(f"/json/messages/{msg_id}")
            return result

        def test_delete_message_by_owner(msg_id: int) -> "TestHttpResponse":
            self.login("hamlet")
            result = self.client_delete(f"/json/messages/{msg_id}")
            return result

        def test_delete_message_by_other_user(msg_id: int) -> "TestHttpResponse":
            self.login("cordelia")
            result = self.client_delete(f"/json/messages/{msg_id}")
            return result

        # Test if message deleting is not allowed (the default).
        set_message_deleting_params(Realm.POLICY_ADMINS_ONLY, "unlimited")
        hamlet = self.example_user("hamlet")
        self.login_user(hamlet)
        msg_id = self.send_stream_message(hamlet, "Denmark")

        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_error(result, "You don't have permission to delete this message")

        result = test_delete_message_by_other_user(msg_id=msg_id)
        self.assert_json_error(result, "You don't have permission to delete this message")

        result = test_delete_message_by_admin(msg_id=msg_id)
        self.assert_json_success(result)

        # Test if message deleting is allowed.
        # Test if the time limit is None (no limit).
        set_message_deleting_params(Realm.POLICY_EVERYONE, "unlimited")
        msg_id = self.send_stream_message(hamlet, "Denmark")
        message = Message.objects.get(id=msg_id)
        message.date_sent = message.date_sent - datetime.timedelta(seconds=600)
        message.save()

        result = test_delete_message_by_other_user(msg_id=msg_id)
        self.assert_json_error(result, "You don't have permission to delete this message")

        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_success(result)

        # Test if the time limit is non-zero.
        set_message_deleting_params(Realm.POLICY_EVERYONE, 240)
        msg_id_1 = self.send_stream_message(hamlet, "Denmark")
        message = Message.objects.get(id=msg_id_1)
        message.date_sent = message.date_sent - datetime.timedelta(seconds=120)
        message.save()

        msg_id_2 = self.send_stream_message(hamlet, "Denmark")
        message = Message.objects.get(id=msg_id_2)
        message.date_sent = message.date_sent - datetime.timedelta(seconds=360)
        message.save()

        result = test_delete_message_by_other_user(msg_id=msg_id_1)
        self.assert_json_error(result, "You don't have permission to delete this message")

        result = test_delete_message_by_owner(msg_id=msg_id_1)
        self.assert_json_success(result)
        result = test_delete_message_by_owner(msg_id=msg_id_2)
        self.assert_json_error(result, "The time limit for deleting this message has passed")

        # No limit for admin.
        result = test_delete_message_by_admin(msg_id=msg_id_2)
        self.assert_json_success(result)

        # Test multiple delete requests with no latency issues.
        msg_id = self.send_stream_message(hamlet, "Denmark")
        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_success(result)
        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_error(result, "Invalid message(s)")

        # Test handling of 500 errors caused by multiple delete requests due to latency;
        # see issue #11219.
        with mock.patch("zerver.views.message_edit.do_delete_messages") as m, mock.patch(
            "zerver.views.message_edit.validate_can_delete_message", return_value=None
        ), mock.patch("zerver.views.message_edit.access_message", return_value=(None, None)):
            m.side_effect = IntegrityError()
            result = test_delete_message_by_owner(msg_id=msg_id)
            self.assert_json_error(result, "Message already deleted")
            m.side_effect = Message.DoesNotExist()
            result = test_delete_message_by_owner(msg_id=msg_id)
            self.assert_json_error(result, "Message already deleted")

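    # Verifies which roles can delete their own messages under each
    # delete_own_message_policy value, including the waiting-period check for
    # POLICY_FULL_MEMBERS_ONLY.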
    def test_delete_message_according_to_delete_own_message_policy(self) -> None:
        def check_delete_message_by_sender(
            sender_name: str, error_msg: Optional[str] = None
        ) -> None:
            sender = self.example_user(sender_name)
            msg_id = self.send_stream_message(sender, "Verona")
            self.login_user(sender)
            result = self.client_delete(f"/json/messages/{msg_id}")
            if error_msg is None:
                self.assert_json_success(result)
            else:
                self.assert_json_error(result, error_msg)

        realm = get_realm("zulip")

        do_set_realm_property(
            realm, "delete_own_message_policy", Realm.POLICY_ADMINS_ONLY, acting_user=None
        )
        check_delete_message_by_sender("shiva", "You don't have permission to delete this message")
        check_delete_message_by_sender("iago")

        do_set_realm_property(
            realm, "delete_own_message_policy", Realm.POLICY_MODERATORS_ONLY, acting_user=None
        )
        check_delete_message_by_sender(
            "cordelia", "You don't have permission to delete this message"
        )
        check_delete_message_by_sender("shiva")

        do_set_realm_property(
            realm, "delete_own_message_policy", Realm.POLICY_MEMBERS_ONLY, acting_user=None
        )
        check_delete_message_by_sender(
            "polonius", "You don't have permission to delete this message"
        )
        check_delete_message_by_sender("cordelia")

        do_set_realm_property(
            realm, "delete_own_message_policy", Realm.POLICY_FULL_MEMBERS_ONLY, acting_user=None
        )
        do_set_realm_property(realm, "waiting_period_threshold", 10, acting_user=None)
        cordelia = self.example_user("cordelia")
        cordelia.date_joined = timezone_now() - datetime.timedelta(days=9)
        cordelia.save()
        check_delete_message_by_sender(
            "cordelia", "You don't have permission to delete this message"
        )
        cordelia.date_joined = timezone_now() - datetime.timedelta(days=11)
        cordelia.save()
        check_delete_message_by_sender("cordelia")

        do_set_realm_property(
            realm, "delete_own_message_policy", Realm.POLICY_EVERYONE, acting_user=None
        )
        check_delete_message_by_sender("cordelia")
        check_delete_message_by_sender("polonius")

    def test_delete_event_sent_after_transaction_commits(self) -> None:
        """
        Tests that `send_event` is hooked to `transaction.on_commit`. This is important, because
        we don't want to end up holding locks on message rows for too long if the event queue runs
        into a problem.
        """
        hamlet = self.example_user("hamlet")
        self.send_stream_message(hamlet, "Denmark")
        message = self.get_last_message()

        with self.capture_send_event_calls(expected_num_events=1):
            with mock.patch("zerver.tornado.django_api.queue_json_publish") as m:
                m.side_effect = AssertionError(
                    "Events should be sent only after the transaction commits."
                )
                do_delete_messages(hamlet.realm, [message])