import datetime
from operator import itemgetter
from typing import Any, Dict, List, Optional, Tuple
from unittest import mock

import orjson
from django.db import IntegrityError
from django.http import HttpResponse

from zerver.lib.actions import (
    do_change_stream_post_policy,
    do_change_user_role,
    do_set_realm_property,
    do_update_message,
    get_topic_messages,
    get_user_info_for_message_updates,
)
from zerver.lib.message import MessageDict, has_message_access, messages_for_ids
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import cache_tries_captured, queries_captured
from zerver.lib.topic import LEGACY_PREV_TOPIC, TOPIC_NAME
from zerver.models import Message, Realm, Stream, UserMessage, UserProfile, get_realm, get_stream


class EditMessageTest(ZulipTestCase):
    def check_topic(self, msg_id: int, topic_name: str) -> None:
        msg = Message.objects.get(id=msg_id)
        self.assertEqual(msg.topic_name(), topic_name)

    def check_message(self, msg_id: int, topic_name: str, content: str) -> None:
        # Make sure we saved the message correctly to the DB.
        msg = Message.objects.get(id=msg_id)
        self.assertEqual(msg.topic_name(), topic_name)
        self.assertEqual(msg.content, content)

        """
        We assume our caller just edited a message.

        Next, we will make sure we properly cached the messages. We still have
        to do a query to hydrate recipient info, but we won't need to hit the
        zerver_message table.
        """

        with queries_captured(keep_cache_warm=True) as queries:
            (fetch_message_dict,) = messages_for_ids(
                message_ids=[msg.id],
                user_message_flags={msg_id: []},
                search_fields={},
                apply_markdown=False,
                client_gravatar=False,
                allow_edit_history=True,
            )

        self.assert_length(queries, 1)
        for query in queries:
            self.assertNotIn("message", query["sql"])

        self.assertEqual(
            fetch_message_dict[TOPIC_NAME],
            msg.topic_name(),
        )
        self.assertEqual(
            fetch_message_dict["content"],
            msg.content,
        )
        self.assertEqual(
            fetch_message_dict["sender_id"],
            msg.sender_id,
        )

        if msg.edit_history:
            self.assertEqual(
                fetch_message_dict["edit_history"],
                orjson.loads(msg.edit_history),
            )

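    # Note on query counting: per the blame history of these tests, queries_captured()
    # (from zerver.lib.test_helpers) clears the cache once, up front, before the block
    # it wraps, so the counts below assume a cold cache; queries_captured(keep_cache_warm=True),
    # as used in check_message above, skips that reset, which is why a freshly edited,
    # already-cached message only needs the single query that hydrates recipient data
    # and never hits the zerver_message table.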
    def test_query_count_on_to_dict_uncached(self) -> None:
        # `to_dict_uncached` method is used by the mechanisms
        # tested in this class. Hence, its performance is tested here.
        # Generate 3 messages
        user = self.example_user("hamlet")
        self.login_user(user)
        stream_name = "public_stream"
        self.subscribe(user, stream_name)
        message_ids = []
        message_ids.append(self.send_stream_message(user, stream_name, "Message one"))
        user_2 = self.example_user("cordelia")
        self.subscribe(user_2, stream_name)
        message_ids.append(self.send_stream_message(user_2, stream_name, "Message two"))
        self.subscribe(self.notification_bot(), stream_name)
        message_ids.append(
            self.send_stream_message(self.notification_bot(), stream_name, "Message three")
        )
        messages = [
            Message.objects.select_related().get(id=message_id) for message_id in message_ids
        ]

        # Check number of queries performed
        with queries_captured() as queries:
            MessageDict.to_dict_uncached(messages)
        # 1 query for realm_id per message = 3
        # 1 query each for reactions & submessage for all messages = 2
        self.assert_length(queries, 5)

        realm_id = 2  # Fetched from stream object

        # Check number of queries performed with realm_id
        with queries_captured() as queries:
            MessageDict.to_dict_uncached(messages, realm_id)
        # 1 query each for reactions & submessage for all messages = 2
        self.assert_length(queries, 2)

    def test_save_message(self) -> None:
        """This is also tested by a client test, but here we can verify
        the cache against the database"""
        self.login("hamlet")
        msg_id = self.send_stream_message(
            self.example_user("hamlet"), "Scotland", topic_name="editing", content="before edit"
        )
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "message_id": msg_id,
                "content": "after edit",
            },
        )
        self.assert_json_success(result)
        self.check_message(msg_id, topic_name="editing", content="after edit")

        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "message_id": msg_id,
                "topic": "edited",
            },
        )
        self.assert_json_success(result)
        self.check_topic(msg_id, topic_name="edited")

    def test_fetch_raw_message(self) -> None:
        self.login("hamlet")
        msg_id = self.send_personal_message(
            from_user=self.example_user("hamlet"),
            to_user=self.example_user("cordelia"),
            content="**before** edit",
        )
        result = self.client_get("/json/messages/" + str(msg_id))
        self.assert_json_success(result)
        self.assertEqual(result.json()["raw_content"], "**before** edit")

        # Test error cases
        result = self.client_get("/json/messages/999999")
        self.assert_json_error(result, "Invalid message(s)")

        self.login("cordelia")
        result = self.client_get("/json/messages/" + str(msg_id))
        self.assert_json_success(result)

        self.login("othello")
        result = self.client_get("/json/messages/" + str(msg_id))
        self.assert_json_error(result, "Invalid message(s)")

    def test_fetch_raw_message_stream_wrong_realm(self) -> None:
        user_profile = self.example_user("hamlet")
        self.login_user(user_profile)
        stream = self.make_stream("public_stream")
        self.subscribe(user_profile, stream.name)
        msg_id = self.send_stream_message(
            user_profile, stream.name, topic_name="test", content="test"
        )
        result = self.client_get("/json/messages/" + str(msg_id))
        self.assert_json_success(result)

        mit_user = self.mit_user("sipbtest")
        self.login_user(mit_user)
        result = self.client_get("/json/messages/" + str(msg_id), subdomain="zephyr")
        self.assert_json_error(result, "Invalid message(s)")

    def test_fetch_raw_message_private_stream(self) -> None:
        user_profile = self.example_user("hamlet")
        self.login_user(user_profile)
        stream = self.make_stream("private_stream", invite_only=True)
        self.subscribe(user_profile, stream.name)
        msg_id = self.send_stream_message(
            user_profile, stream.name, topic_name="test", content="test"
        )
        result = self.client_get("/json/messages/" + str(msg_id))
        self.assert_json_success(result)
        self.login("othello")
        result = self.client_get("/json/messages/" + str(msg_id))
        self.assert_json_error(result, "Invalid message(s)")

    # Right now, we prevent users from editing widgets.
    def test_edit_submessage(self) -> None:
        self.login("hamlet")
        msg_id = self.send_stream_message(
            self.example_user("hamlet"),
            "Scotland",
            topic_name="editing",
            content="/poll Games?\nYES\nNO",
        )
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "message_id": msg_id,
                "content": "/poll Games?\nYES\nNO\nMaybe",
            },
        )
        self.assert_json_error(result, "Widgets cannot be edited.")

    def test_edit_message_no_permission(self) -> None:
        self.login("hamlet")
        msg_id = self.send_stream_message(
            self.example_user("iago"), "Scotland", topic_name="editing", content="before edit"
        )
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "message_id": msg_id,
                "content": "content after edit",
            },
        )
        self.assert_json_error(result, "You don't have permission to edit this message")

    def test_edit_message_no_changes(self) -> None:
        self.login("hamlet")
        msg_id = self.send_stream_message(
            self.example_user("hamlet"), "Scotland", topic_name="editing", content="before edit"
        )
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "message_id": msg_id,
            },
        )
        self.assert_json_error(result, "Nothing to change")

    def test_edit_message_no_topic(self) -> None:
        self.login("hamlet")
        msg_id = self.send_stream_message(
            self.example_user("hamlet"), "Scotland", topic_name="editing", content="before edit"
        )
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "message_id": msg_id,
                "topic": " ",
            },
        )
        self.assert_json_error(result, "Topic can't be empty")

    def test_edit_message_no_content(self) -> None:
        self.login("hamlet")
        msg_id = self.send_stream_message(
            self.example_user("hamlet"), "Scotland", topic_name="editing", content="before edit"
        )
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "message_id": msg_id,
                "content": " ",
            },
        )
        self.assert_json_success(result)
        content = Message.objects.filter(id=msg_id).values_list("content", flat=True)[0]
        self.assertEqual(content, "(deleted)")

    def test_edit_message_history_disabled(self) -> None:
        user_profile = self.example_user("hamlet")
        do_set_realm_property(user_profile.realm, "allow_edit_history", False, acting_user=None)
        self.login("hamlet")

        # Single-line edit
        msg_id_1 = self.send_stream_message(
            self.example_user("hamlet"),
            "Denmark",
            topic_name="editing",
            content="content before edit",
        )

        new_content_1 = "content after edit"
        result_1 = self.client_patch(
            "/json/messages/" + str(msg_id_1),
            {
                "message_id": msg_id_1,
                "content": new_content_1,
            },
        )
        self.assert_json_success(result_1)

        result = self.client_get("/json/messages/" + str(msg_id_1) + "/history")
        self.assert_json_error(result, "Message edit history is disabled in this organization")

        # Now verify that if we fetch the message directly, there's no
        # edit history data attached.
        messages_result = self.client_get(
            "/json/messages", {"anchor": msg_id_1, "num_before": 0, "num_after": 10}
        )
        self.assert_json_success(messages_result)
        json_messages = orjson.loads(messages_result.content)
        for msg in json_messages["messages"]:
            self.assertNotIn("edit_history", msg)

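    # The /history endpoint returns a "message_history" list, oldest entry first:
    # index 0 holds the original rendering, and each later entry carries the new
    # rendered_content plus prev_rendered_content and a content_html_diff whose
    # <span class="highlight_text_inserted"> / <span class="highlight_text_deleted">
    # markup highlights what changed, as the assertions below exercise.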
    def test_edit_message_history(self) -> None:
        self.login("hamlet")

        # Single-line edit
        msg_id_1 = self.send_stream_message(
            self.example_user("hamlet"),
            "Scotland",
            topic_name="editing",
            content="content before edit",
        )
        new_content_1 = "content after edit"
        result_1 = self.client_patch(
            "/json/messages/" + str(msg_id_1),
            {
                "message_id": msg_id_1,
                "content": new_content_1,
            },
        )
        self.assert_json_success(result_1)

        message_edit_history_1 = self.client_get("/json/messages/" + str(msg_id_1) + "/history")
        json_response_1 = orjson.loads(message_edit_history_1.content)
        message_history_1 = json_response_1["message_history"]

        # Check content of message after edit.
        self.assertEqual(message_history_1[0]["rendered_content"], "<p>content before edit</p>")
        self.assertEqual(message_history_1[1]["rendered_content"], "<p>content after edit</p>")
        self.assertEqual(
            message_history_1[1]["content_html_diff"],
            (
                "<p>content "
                '<span class="highlight_text_inserted">after</span> '
                '<span class="highlight_text_deleted">before</span>'
                " edit</p>"
            ),
        )
        # Check content of message before edit.
        self.assertEqual(
            message_history_1[1]["prev_rendered_content"], "<p>content before edit</p>"
        )

        # Edits on new lines
        msg_id_2 = self.send_stream_message(
            self.example_user("hamlet"),
            "Scotland",
            topic_name="editing",
            content="content before edit, line 1\n\ncontent before edit, line 3",
        )
        new_content_2 = (
            "content before edit, line 1\n"
            "content after edit, line 2\n"
            "content before edit, line 3"
        )
        result_2 = self.client_patch(
            "/json/messages/" + str(msg_id_2),
            {
                "message_id": msg_id_2,
                "content": new_content_2,
            },
        )
        self.assert_json_success(result_2)

        message_edit_history_2 = self.client_get("/json/messages/" + str(msg_id_2) + "/history")
        json_response_2 = orjson.loads(message_edit_history_2.content)
        message_history_2 = json_response_2["message_history"]

        self.assertEqual(
            message_history_2[0]["rendered_content"],
            "<p>content before edit, line 1</p>\n<p>content before edit, line 3</p>",
        )
        self.assertEqual(
            message_history_2[1]["rendered_content"],
            (
                "<p>content before edit, line 1<br>\n"
                "content after edit, line 2<br>\n"
                "content before edit, line 3</p>"
            ),
        )
        self.assertEqual(
            message_history_2[1]["content_html_diff"],
            (
                "<p>content before edit, line 1<br> "
                'content <span class="highlight_text_inserted">after edit, line 2<br> '
                "content</span> before edit, line 3</p>"
            ),
        )
        self.assertEqual(
            message_history_2[1]["prev_rendered_content"],
            "<p>content before edit, line 1</p>\n<p>content before edit, line 3</p>",
        )

    def test_edit_link(self) -> None:
        # Link editing
        self.login("hamlet")
        msg_id_1 = self.send_stream_message(
            self.example_user("hamlet"),
            "Scotland",
            topic_name="editing",
            content="Here is a link to [zulip](www.zulip.org).",
        )
        new_content_1 = "Here is a link to [zulip](www.zulipchat.com)."
        result_1 = self.client_patch(
            "/json/messages/" + str(msg_id_1),
            {
                "message_id": msg_id_1,
                "content": new_content_1,
            },
        )
        self.assert_json_success(result_1)

        message_edit_history_1 = self.client_get("/json/messages/" + str(msg_id_1) + "/history")
        json_response_1 = orjson.loads(message_edit_history_1.content)
        message_history_1 = json_response_1["message_history"]

        # Check content of message after edit.
        self.assertEqual(
            message_history_1[0]["rendered_content"],
            "<p>Here is a link to " '<a href="http://www.zulip.org">zulip</a>.</p>',
        )
        self.assertEqual(
            message_history_1[1]["rendered_content"],
            "<p>Here is a link to " '<a href="http://www.zulipchat.com">zulip</a>.</p>',
        )
        self.assertEqual(
            message_history_1[1]["content_html_diff"],
            (
                '<p>Here is a link to <a href="http://www.zulipchat.com"'
                ">zulip "
                '<span class="highlight_text_inserted"> Link: http://www.zulipchat.com .'
                '</span> <span class="highlight_text_deleted"> Link: http://www.zulip.org .'
                "</span> </a></p>"
            ),
        )

    def test_edit_history_unedited(self) -> None:
        self.login("hamlet")

        msg_id = self.send_stream_message(
            self.example_user("hamlet"),
            "Scotland",
            topic_name="editing",
            content="This message has not been edited.",
        )

        result = self.client_get(f"/json/messages/{msg_id}/history")

        self.assert_json_success(result)

        message_history = result.json()["message_history"]
        self.assert_length(message_history, 1)

    def test_user_info_for_updates(self) -> None:
        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")

        self.login_user(hamlet)
        self.subscribe(hamlet, "Scotland")
        self.subscribe(cordelia, "Scotland")

        msg_id = self.send_stream_message(
            hamlet, "Scotland", content="@**Cordelia, Lear's daughter**"
        )

        user_info = get_user_info_for_message_updates(msg_id)
        message_user_ids = user_info["message_user_ids"]
        self.assertIn(hamlet.id, message_user_ids)
        self.assertIn(cordelia.id, message_user_ids)

        mention_user_ids = user_info["mention_user_ids"]
        self.assertEqual(mention_user_ids, {cordelia.id})

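    # Shape of a Message.edit_history entry, as exercised below (illustrative; the
    # exact fields depend on what was edited): a content edit records
    #   {"timestamp", "user_id", "prev_content", "prev_rendered_content",
    #    "prev_rendered_content_version"},
    # a topic-only edit records {"timestamp", "user_id", LEGACY_PREV_TOPIC}, and
    # entries are stored newest first, so history[0] is always the latest edit.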
    def test_edit_cases(self) -> None:
        """This test verifies the accuracy of construction of Zulip's edit
        history data structures."""
        self.login("hamlet")
        hamlet = self.example_user("hamlet")
        msg_id = self.send_stream_message(
            self.example_user("hamlet"), "Scotland", topic_name="topic 1", content="content 1"
        )
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "message_id": msg_id,
                "content": "content 2",
            },
        )
        self.assert_json_success(result)
        history = orjson.loads(Message.objects.get(id=msg_id).edit_history)
        self.assertEqual(history[0]["prev_content"], "content 1")
        self.assertEqual(history[0]["user_id"], hamlet.id)
        self.assertEqual(
            set(history[0].keys()),
            {
                "timestamp",
                "prev_content",
                "user_id",
                "prev_rendered_content",
                "prev_rendered_content_version",
            },
        )

        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "message_id": msg_id,
                "topic": "topic 2",
            },
        )
        self.assert_json_success(result)
        history = orjson.loads(Message.objects.get(id=msg_id).edit_history)
        self.assertEqual(history[0][LEGACY_PREV_TOPIC], "topic 1")
        self.assertEqual(history[0]["user_id"], hamlet.id)
        self.assertEqual(set(history[0].keys()), {"timestamp", LEGACY_PREV_TOPIC, "user_id"})

        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "message_id": msg_id,
                "content": "content 3",
                "topic": "topic 3",
            },
        )
        self.assert_json_success(result)
        history = orjson.loads(Message.objects.get(id=msg_id).edit_history)
        self.assertEqual(history[0]["prev_content"], "content 2")
        self.assertEqual(history[0][LEGACY_PREV_TOPIC], "topic 2")
        self.assertEqual(history[0]["user_id"], hamlet.id)
        self.assertEqual(
            set(history[0].keys()),
            {
                "timestamp",
                LEGACY_PREV_TOPIC,
                "prev_content",
                "user_id",
                "prev_rendered_content",
                "prev_rendered_content_version",
            },
        )

        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "message_id": msg_id,
                "content": "content 4",
            },
        )
        self.assert_json_success(result)
        history = orjson.loads(Message.objects.get(id=msg_id).edit_history)
        self.assertEqual(history[0]["prev_content"], "content 3")
        self.assertEqual(history[0]["user_id"], hamlet.id)

        self.login("iago")
        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "message_id": msg_id,
                "topic": "topic 4",
            },
        )
        self.assert_json_success(result)
        history = orjson.loads(Message.objects.get(id=msg_id).edit_history)
        self.assertEqual(history[0][LEGACY_PREV_TOPIC], "topic 3")
        self.assertEqual(history[0]["user_id"], self.example_user("iago").id)

        history = orjson.loads(Message.objects.get(id=msg_id).edit_history)
        self.assertEqual(history[0][LEGACY_PREV_TOPIC], "topic 3")
        self.assertEqual(history[2][LEGACY_PREV_TOPIC], "topic 2")
        self.assertEqual(history[3][LEGACY_PREV_TOPIC], "topic 1")
        self.assertEqual(history[1]["prev_content"], "content 3")
        self.assertEqual(history[2]["prev_content"], "content 2")
        self.assertEqual(history[4]["prev_content"], "content 1")

        # Now, we verify that the edit history data sent back has the
        # correct filled-out fields
        message_edit_history = self.client_get("/json/messages/" + str(msg_id) + "/history")

        json_response = orjson.loads(message_edit_history.content)

        # We reverse the message history view output so that the IDs line up with the above.
        message_history = list(reversed(json_response["message_history"]))
        i = 0
        for entry in message_history:
            expected_entries = {"content", "rendered_content", "topic", "timestamp", "user_id"}
            if i in {0, 2, 3}:
                expected_entries.add("prev_topic")
            if i in {1, 2, 4}:
                expected_entries.add("prev_content")
                expected_entries.add("prev_rendered_content")
                expected_entries.add("content_html_diff")
            i += 1
            self.assertEqual(expected_entries, set(entry.keys()))
        self.assert_length(message_history, 6)
        self.assertEqual(message_history[0]["prev_topic"], "topic 3")
        self.assertEqual(message_history[0]["topic"], "topic 4")
        self.assertEqual(message_history[1]["topic"], "topic 3")
        self.assertEqual(message_history[2]["topic"], "topic 3")
        self.assertEqual(message_history[2]["prev_topic"], "topic 2")
        self.assertEqual(message_history[3]["topic"], "topic 2")
        self.assertEqual(message_history[3]["prev_topic"], "topic 1")
        self.assertEqual(message_history[4]["topic"], "topic 1")

        self.assertEqual(message_history[0]["content"], "content 4")
        self.assertEqual(message_history[1]["content"], "content 4")
        self.assertEqual(message_history[1]["prev_content"], "content 3")
        self.assertEqual(message_history[2]["content"], "content 3")
        self.assertEqual(message_history[2]["prev_content"], "content 2")
        self.assertEqual(message_history[3]["content"], "content 2")
        self.assertEqual(message_history[4]["content"], "content 2")
        self.assertEqual(message_history[4]["prev_content"], "content 1")

        self.assertEqual(message_history[5]["content"], "content 1")
        self.assertEqual(message_history[5]["topic"], "topic 1")

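    # The realm settings driving the cases below: allow_message_editing gates all
    # edits, message_content_edit_limit_seconds bounds how long content edits are
    # accepted (0 meaning no time limit), and allow_community_topic_editing controls
    # whether non-admins may edit the topics of other users' messages.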
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
def test_edit_message_content_limit(self) -> None:
|
2021-02-12 08:19:30 +01:00
|
|
|
def set_message_editing_params(
|
|
|
|
allow_message_editing: bool,
|
|
|
|
message_content_edit_limit_seconds: int,
|
|
|
|
allow_community_topic_editing: bool,
|
|
|
|
) -> None:
|
|
|
|
result = self.client_patch(
|
|
|
|
"/json/realm",
|
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"allow_message_editing": orjson.dumps(allow_message_editing).decode(),
|
|
|
|
"message_content_edit_limit_seconds": message_content_edit_limit_seconds,
|
|
|
|
"allow_community_topic_editing": orjson.dumps(
|
2021-02-12 08:19:30 +01:00
|
|
|
allow_community_topic_editing
|
|
|
|
).decode(),
|
|
|
|
},
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
self.assert_json_success(result)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def do_edit_message_assert_success(
|
|
|
|
id_: int, unique_str: str, topic_only: bool = False
|
|
|
|
) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
new_topic = "topic" + unique_str
|
|
|
|
new_content = "content" + unique_str
|
|
|
|
params_dict = {"message_id": id_, "topic": new_topic}
|
2020-07-03 11:09:37 +02:00
|
|
|
if not topic_only:
|
2021-02-12 08:20:45 +01:00
|
|
|
params_dict["content"] = new_content
|
2020-07-03 11:09:37 +02:00
|
|
|
result = self.client_patch("/json/messages/" + str(id_), params_dict)
|
|
|
|
self.assert_json_success(result)
|
|
|
|
if topic_only:
|
|
|
|
self.check_topic(id_, topic_name=new_topic)
|
|
|
|
else:
|
|
|
|
self.check_message(id_, topic_name=new_topic, content=new_content)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def do_edit_message_assert_error(
|
|
|
|
id_: int, unique_str: str, error: str, topic_only: bool = False
|
|
|
|
) -> None:
|
2020-07-03 11:09:37 +02:00
|
|
|
message = Message.objects.get(id=id_)
|
|
|
|
old_topic = message.topic_name()
|
|
|
|
old_content = message.content
|
2021-02-12 08:20:45 +01:00
|
|
|
new_topic = "topic" + unique_str
|
|
|
|
new_content = "content" + unique_str
|
|
|
|
params_dict = {"message_id": id_, "topic": new_topic}
|
2020-07-03 11:09:37 +02:00
|
|
|
if not topic_only:
|
2021-02-12 08:20:45 +01:00
|
|
|
params_dict["content"] = new_content
|
2020-07-03 11:09:37 +02:00
|
|
|
result = self.client_patch("/json/messages/" + str(id_), params_dict)
|
|
|
|
message = Message.objects.get(id=id_)
|
|
|
|
self.assert_json_error(result, error)
|
|
|
|
|
|
|
|
msg = Message.objects.get(id=id_)
|
|
|
|
self.assertEqual(msg.topic_name(), old_topic)
|
|
|
|
self.assertEqual(msg.content, old_content)
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("iago")
|
2020-07-03 11:09:37 +02:00
|
|
|
# send a message in the past
|
2021-02-12 08:19:30 +01:00
|
|
|
id_ = self.send_stream_message(
|
|
|
|
self.example_user("iago"), "Scotland", content="content", topic_name="topic"
|
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
message = Message.objects.get(id=id_)
|
|
|
|
message.date_sent = message.date_sent - datetime.timedelta(seconds=180)
|
|
|
|
message.save()
|
|
|
|
|
|
|
|
# test the various possible message editing settings
|
|
|
|
# high enough time limit, all edits allowed
|
|
|
|
set_message_editing_params(True, 240, False)
|
2021-02-12 08:20:45 +01:00
|
|
|
do_edit_message_assert_success(id_, "A")
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
# out of time, only topic editing allowed
|
|
|
|
set_message_editing_params(True, 120, False)
|
2021-02-12 08:20:45 +01:00
|
|
|
do_edit_message_assert_success(id_, "B", True)
|
|
|
|
do_edit_message_assert_error(id_, "C", "The time limit for editing this message has passed")
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
# infinite time, all edits allowed
|
|
|
|
set_message_editing_params(True, 0, False)
|
2021-02-12 08:20:45 +01:00
|
|
|
do_edit_message_assert_success(id_, "D")
|
2020-07-03 11:09:37 +02:00
|
|
|
|
|
|
|
# without allow_message_editing, nothing is allowed
|
|
|
|
set_message_editing_params(False, 240, False)
|
2021-02-12 08:19:30 +01:00
|
|
|
do_edit_message_assert_error(
|
2021-02-12 08:20:45 +01:00
|
|
|
id_, "E", "Your organization has turned off message editing", True
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
set_message_editing_params(False, 120, False)
|
2021-02-12 08:19:30 +01:00
|
|
|
do_edit_message_assert_error(
|
2021-02-12 08:20:45 +01:00
|
|
|
id_, "F", "Your organization has turned off message editing", True
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-07-03 11:09:37 +02:00
|
|
|
set_message_editing_params(False, 0, False)
|
2021-02-12 08:19:30 +01:00
|
|
|
do_edit_message_assert_error(
|
2021-02-12 08:20:45 +01:00
|
|
|
id_, "G", "Your organization has turned off message editing", True
|
2021-02-12 08:19:30 +01:00
|
|
|
)
    def test_allow_community_topic_editing(self) -> None:
        def set_message_editing_params(
            allow_message_editing: bool,
            message_content_edit_limit_seconds: int,
            allow_community_topic_editing: bool,
        ) -> None:
            result = self.client_patch(
                "/json/realm",
                {
                    "allow_message_editing": orjson.dumps(allow_message_editing).decode(),
                    "message_content_edit_limit_seconds": message_content_edit_limit_seconds,
                    "allow_community_topic_editing": orjson.dumps(
                        allow_community_topic_editing
                    ).decode(),
                },
            )
            self.assert_json_success(result)

        def do_edit_message_assert_success(id_: int, unique_str: str) -> None:
            new_topic = "topic" + unique_str
            params_dict = {"message_id": id_, "topic": new_topic}
            result = self.client_patch("/json/messages/" + str(id_), params_dict)
            self.assert_json_success(result)
            self.check_topic(id_, topic_name=new_topic)

        def do_edit_message_assert_error(id_: int, unique_str: str, error: str) -> None:
            message = Message.objects.get(id=id_)
            old_topic = message.topic_name()
            old_content = message.content
            new_topic = "topic" + unique_str
            params_dict = {"message_id": id_, "topic": new_topic}
            result = self.client_patch("/json/messages/" + str(id_), params_dict)
            message = Message.objects.get(id=id_)
            self.assert_json_error(result, error)
            msg = Message.objects.get(id=id_)
            self.assertEqual(msg.topic_name(), old_topic)
            self.assertEqual(msg.content, old_content)

        self.login("iago")
        # send a message in the past
        id_ = self.send_stream_message(
            self.example_user("hamlet"), "Scotland", content="content", topic_name="topic"
        )
        message = Message.objects.get(id=id_)
        message.date_sent = message.date_sent - datetime.timedelta(seconds=180)
        message.save()

        # any user can edit the topic of a message
        set_message_editing_params(True, 0, True)
        # log in as a new user
        self.login("cordelia")
        do_edit_message_assert_success(id_, "A")

        # only admins can edit the topics of messages
        self.login("iago")
        set_message_editing_params(True, 0, False)
        do_edit_message_assert_success(id_, "B")
        self.login("cordelia")
        do_edit_message_assert_error(id_, "C", "You don't have permission to edit this message")

        # users cannot edit topics if allow_message_editing is False
        self.login("iago")
        set_message_editing_params(False, 0, True)
        self.login("cordelia")
        do_edit_message_assert_error(id_, "D", "Your organization has turned off message editing")

        # non-admin users cannot edit topics sent > 72 hrs ago
        message.date_sent = message.date_sent - datetime.timedelta(seconds=290000)
        message.save()
        self.login("iago")
        set_message_editing_params(True, 0, True)
        do_edit_message_assert_success(id_, "E")
        self.login("cordelia")
        do_edit_message_assert_error(id_, "F", "The time limit for editing this message has passed")

        # anyone should be able to edit "no topic" indefinitely
        message.set_topic_name("(no topic)")
        message.save()
        self.login("cordelia")
        do_edit_message_assert_success(id_, "D")

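    # The next two tests patch zerver.lib.actions.send_event to inspect which users
    # do_update_message notifies: recipients with a UserMessage row get their own
    # flags, other recipients are sent with flags=["read"], and long-term idle users
    # are skipped entirely (see the notify() helpers below).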
    @mock.patch("zerver.lib.actions.send_event")
    def test_edit_topic_public_history_stream(self, mock_send_event: mock.MagicMock) -> None:
        stream_name = "Macbeth"
        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        self.make_stream(stream_name, history_public_to_subscribers=True)
        self.subscribe(hamlet, stream_name)
        self.login_user(hamlet)
        message_id = self.send_stream_message(hamlet, stream_name, "Where am I?")

        self.login_user(cordelia)
        self.subscribe(cordelia, stream_name)
        message = Message.objects.get(id=message_id)

        def do_update_message_topic_success(
            user_profile: UserProfile,
            message: Message,
            topic_name: str,
            users_to_be_notified: List[Dict[str, Any]],
        ) -> None:
            do_update_message(
                user_profile=user_profile,
                target_message=message,
                new_stream=None,
                topic_name=topic_name,
                propagate_mode="change_later",
                send_notification_to_old_thread=False,
                send_notification_to_new_thread=False,
                content=None,
                rendered_content=None,
                prior_mention_user_ids=set(),
                mention_user_ids=set(),
                mention_data=None,
            )

            mock_send_event.assert_called_with(mock.ANY, mock.ANY, users_to_be_notified)

        # Returns the users that need to be notified when a message topic is changed
        def notify(user_id: int) -> Dict[str, Any]:
            um = UserMessage.objects.get(message=message_id)
            if um.user_profile_id == user_id:
                return {
                    "id": user_id,
                    "flags": um.flags_list(),
                }
            else:
                return {
                    "id": user_id,
                    "flags": ["read"],
                }

        users_to_be_notified = list(map(notify, [hamlet.id, cordelia.id]))
        # Edit topic of a message sent before Cordelia subscribed the stream
        do_update_message_topic_success(
            cordelia, message, "Othello eats apple", users_to_be_notified
        )

        # If Cordelia is long-term idle, she doesn't get a notification.
        cordelia.long_term_idle = True
        cordelia.save()
        users_to_be_notified = list(map(notify, [hamlet.id]))
        do_update_message_topic_success(
            cordelia, message, "Another topic idle", users_to_be_notified
        )
        cordelia.long_term_idle = False
        cordelia.save()

        # Even if Hamlet unsubscribes the stream, he should be notified when the topic is changed
        # because he has a UserMessage row.
        self.unsubscribe(hamlet, stream_name)
        users_to_be_notified = list(map(notify, [hamlet.id, cordelia.id]))
        do_update_message_topic_success(cordelia, message, "Another topic", users_to_be_notified)

        # Hamlet subscribes to the stream again and Cordelia unsubscribes, then Hamlet changes
        # the message topic. Cordelia won't receive any updates when a message on that stream is
        # changed because she is not a subscriber and doesn't have a UserMessage row.
        self.subscribe(hamlet, stream_name)
        self.unsubscribe(cordelia, stream_name)
        self.login_user(hamlet)
        users_to_be_notified = list(map(notify, [hamlet.id]))
        do_update_message_topic_success(hamlet, message, "Change again", users_to_be_notified)

    @mock.patch("zerver.lib.actions.send_event")
    def test_wildcard_mention(self, mock_send_event: mock.MagicMock) -> None:
        stream_name = "Macbeth"
        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        self.make_stream(stream_name, history_public_to_subscribers=True)
        self.subscribe(hamlet, stream_name)
        self.subscribe(cordelia, stream_name)
        self.login_user(hamlet)
        message_id = self.send_stream_message(hamlet, stream_name, "Hello everyone")

        def notify(user_id: int) -> Dict[str, Any]:
            return {
                "id": user_id,
                "flags": ["wildcard_mentioned"],
            }

        users_to_be_notified = sorted(map(notify, [cordelia.id, hamlet.id]), key=itemgetter("id"))
        result = self.client_patch(
            "/json/messages/" + str(message_id),
            {
                "message_id": message_id,
                "content": "Hello @**everyone**",
            },
        )
        self.assert_json_success(result)

        # Extract the send_event call where event type is 'update_message'.
        # Here we assert wildcard_mention_user_ids has been set properly.
        called = False
        for call_args in mock_send_event.call_args_list:
            (arg_realm, arg_event, arg_notified_users) = call_args[0]
            if arg_event["type"] == "update_message":
                self.assertEqual(arg_event["type"], "update_message")
                self.assertEqual(arg_event["wildcard_mention_user_ids"], [cordelia.id, hamlet.id])
                self.assertEqual(
                    sorted(arg_notified_users, key=itemgetter("id")), users_to_be_notified
                )
                called = True
        self.assertTrue(called)

    def test_topic_edit_history_saved_in_all_message(self) -> None:
        self.login("hamlet")
        id1 = self.send_stream_message(self.example_user("hamlet"), "Scotland", topic_name="topic1")
        id2 = self.send_stream_message(self.example_user("iago"), "Scotland", topic_name="topic1")
        id3 = self.send_stream_message(self.example_user("iago"), "Rome", topic_name="topic1")
        id4 = self.send_stream_message(self.example_user("hamlet"), "Scotland", topic_name="topic2")
        id5 = self.send_stream_message(self.example_user("iago"), "Scotland", topic_name="topic1")

        def verify_edit_history(new_topic: str, len_edit_history: int) -> None:
            for msg_id in [id1, id2, id5]:
                msg = Message.objects.get(id=msg_id)

                self.assertEqual(
                    new_topic,
                    msg.topic_name(),
                )
                # Since edit history is being generated by do_update_message,
                # its contents can vary over time; so, to keep this test
                # future-proof, we only verify its length.
                self.assert_length(orjson.loads(msg.edit_history), len_edit_history)

            for msg_id in [id3, id4]:
                msg = Message.objects.get(id=msg_id)
                self.assertEqual(msg.edit_history, None)

        new_topic = "edited"
        result = self.client_patch(
            "/json/messages/" + str(id1),
            {
                "message_id": id1,
                "topic": new_topic,
                "propagate_mode": "change_later",
            },
        )

        self.assert_json_success(result)
        verify_edit_history(new_topic, 1)

        new_topic = "edited2"
        result = self.client_patch(
            "/json/messages/" + str(id1),
            {
                "message_id": id1,
                "topic": new_topic,
                "propagate_mode": "change_later",
            },
        )

        self.assert_json_success(result)
        verify_edit_history(new_topic, 2)

    def test_topic_and_content_edit(self) -> None:
        self.login("hamlet")
        id1 = self.send_stream_message(
            self.example_user("hamlet"), "Scotland", "message 1", "topic"
        )
        id2 = self.send_stream_message(self.example_user("iago"), "Scotland", "message 2", "topic")
        id3 = self.send_stream_message(
            self.example_user("hamlet"), "Scotland", "message 3", "topic"
        )

        new_topic = "edited"
        result = self.client_patch(
            "/json/messages/" + str(id1),
            {
                "message_id": id1,
                "topic": new_topic,
                "propagate_mode": "change_later",
                "content": "edited message",
            },
        )

        self.assert_json_success(result)

        # Only the content change for id1 should show up in its edit history,
        # while the topic change should be present in all the messages.
        msg1 = Message.objects.get(id=id1)
        msg2 = Message.objects.get(id=id2)
        msg3 = Message.objects.get(id=id3)

        msg1_edit_history = orjson.loads(msg1.edit_history)
        self.assertTrue("prev_content" in msg1_edit_history[0].keys())

        for msg in [msg2, msg3]:
            self.assertFalse("prev_content" in orjson.loads(msg.edit_history)[0].keys())

        for msg in [msg1, msg2, msg3]:
            self.assertEqual(
                new_topic,
                msg.topic_name(),
            )
            self.assert_length(orjson.loads(msg.edit_history), 1)

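    # propagate_mode values used by the propagation tests below: "change_later"
    # renames the edited message plus the later messages in the same stream and
    # topic, "change_all" renames every message in that topic (matched
    # case-insensitively, within the same stream), and anything else is rejected
    # with "Invalid propagate_mode". The default, editing only the one message,
    # is presumably "change_one", though these tests never send it explicitly.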
    def test_propagate_topic_forward(self) -> None:
        self.login("hamlet")
        id1 = self.send_stream_message(self.example_user("hamlet"), "Scotland", topic_name="topic1")
        id2 = self.send_stream_message(self.example_user("iago"), "Scotland", topic_name="topic1")
        id3 = self.send_stream_message(self.example_user("iago"), "Rome", topic_name="topic1")
        id4 = self.send_stream_message(self.example_user("hamlet"), "Scotland", topic_name="topic2")
        id5 = self.send_stream_message(self.example_user("iago"), "Scotland", topic_name="topic1")

        result = self.client_patch(
            "/json/messages/" + str(id1),
            {
                "message_id": id1,
                "topic": "edited",
                "propagate_mode": "change_later",
            },
        )
        self.assert_json_success(result)

        self.check_topic(id1, topic_name="edited")
        self.check_topic(id2, topic_name="edited")
        self.check_topic(id3, topic_name="topic1")
        self.check_topic(id4, topic_name="topic2")
        self.check_topic(id5, topic_name="edited")

|
|
|
|
|
|
|
|
    def test_propagate_all_topics(self) -> None:
        self.login("hamlet")
        id1 = self.send_stream_message(self.example_user("hamlet"), "Scotland", topic_name="topic1")
        id2 = self.send_stream_message(self.example_user("hamlet"), "Scotland", topic_name="topic1")
        id3 = self.send_stream_message(self.example_user("iago"), "Rome", topic_name="topic1")
        id4 = self.send_stream_message(self.example_user("hamlet"), "Scotland", topic_name="topic2")
        id5 = self.send_stream_message(self.example_user("iago"), "Scotland", topic_name="topic1")
        id6 = self.send_stream_message(self.example_user("iago"), "Scotland", topic_name="topic3")

        result = self.client_patch(
            "/json/messages/" + str(id2),
            {
                "message_id": id2,
                "topic": "edited",
                "propagate_mode": "change_all",
            },
        )
        self.assert_json_success(result)

        self.check_topic(id1, topic_name="edited")
        self.check_topic(id2, topic_name="edited")
        self.check_topic(id3, topic_name="topic1")
        self.check_topic(id4, topic_name="topic2")
        self.check_topic(id5, topic_name="edited")
        self.check_topic(id6, topic_name="topic3")

    def test_propagate_all_topics_with_different_uppercase_letters(self) -> None:
        self.login("hamlet")
        id1 = self.send_stream_message(self.example_user("hamlet"), "Scotland", topic_name="topic1")
        id2 = self.send_stream_message(self.example_user("hamlet"), "Scotland", topic_name="Topic1")
        id3 = self.send_stream_message(self.example_user("iago"), "Rome", topic_name="topiC1")
        id4 = self.send_stream_message(self.example_user("iago"), "Scotland", topic_name="toPic1")

        result = self.client_patch(
            "/json/messages/" + str(id2),
            {
                "message_id": id2,
                "topic": "edited",
                "propagate_mode": "change_all",
            },
        )
        self.assert_json_success(result)

        self.check_topic(id1, topic_name="edited")
        self.check_topic(id2, topic_name="edited")
        self.check_topic(id3, topic_name="topiC1")
        self.check_topic(id4, topic_name="edited")

    def test_propagate_invalid(self) -> None:
        self.login("hamlet")
        id1 = self.send_stream_message(self.example_user("hamlet"), "Scotland", topic_name="topic1")

        result = self.client_patch(
            "/json/messages/" + str(id1),
            {
                "topic": "edited",
                "propagate_mode": "invalid",
            },
        )
        self.assert_json_error(result, "Invalid propagate_mode")
        self.check_topic(id1, topic_name="topic1")

        result = self.client_patch(
            "/json/messages/" + str(id1),
            {
                "content": "edited",
                "propagate_mode": "change_all",
            },
        )
        self.assert_json_error(result, "Invalid propagate_mode without topic edit")
        self.check_topic(id1, topic_name="topic1")

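    # Test fixture helper for the topic-move tests: creates an old and a new
    # stream, subscribes the user to both, and sends three messages to the old
    # stream's topic. Returns the user, both streams, and the ids of the first
    # two messages; a third message is also sent to the topic.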
    def prepare_move_topics(
        self, user_email: str, old_stream: str, new_stream: str, topic: str
    ) -> Tuple[UserProfile, Stream, Stream, int, int]:
        user_profile = self.example_user(user_email)
        self.login(user_email)
        stream = self.make_stream(old_stream)
        new_stream = self.make_stream(new_stream)
        self.subscribe(user_profile, stream.name)
        self.subscribe(user_profile, new_stream.name)
        msg_id = self.send_stream_message(
            user_profile, stream.name, topic_name=topic, content="First"
        )
        msg_id_lt = self.send_stream_message(
            user_profile, stream.name, topic_name=topic, content="Second"
        )

        self.send_stream_message(user_profile, stream.name, topic_name=topic, content="third")

        return (user_profile, stream, new_stream, msg_id, msg_id_lt)

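    # Moving a whole topic with "change_all" should leave only a breadcrumb
    # notification in the old topic and append a similar notification after the
    # moved messages in the new topic.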
    def test_move_message_to_stream(self) -> None:
        (user_profile, old_stream, new_stream, msg_id, msg_id_lt) = self.prepare_move_topics(
            "iago", "test move stream", "new stream", "test"
        )

        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "message_id": msg_id,
                "stream_id": new_stream.id,
                "propagate_mode": "change_all",
            },
        )

        self.assert_json_success(result)

        messages = get_topic_messages(user_profile, old_stream, "test")
        self.assert_length(messages, 1)
        self.assertEqual(
            messages[0].content,
            f"This topic was moved by @_**Iago|{user_profile.id}** to #**new stream>test**",
        )

        messages = get_topic_messages(user_profile, new_stream, "test")
        self.assert_length(messages, 4)
        self.assertEqual(
            messages[3].content,
            f"This topic was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**",
        )

    def test_move_message_realm_admin_cant_move_to_another_realm(self) -> None:
        user_profile = self.example_user("iago")
        self.assertEqual(user_profile.role, UserProfile.ROLE_REALM_ADMINISTRATOR)
        self.login("iago")

        lear_realm = get_realm("lear")
        new_stream = self.make_stream("new", lear_realm)

        msg_id = self.send_stream_message(user_profile, "Verona", topic_name="test123")

        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "message_id": msg_id,
                "stream_id": new_stream.id,
                "propagate_mode": "change_all",
            },
        )

        self.assert_json_error(result, "Invalid stream id")

    def test_move_message_realm_admin_cant_move_to_private_stream_without_subscription(
        self,
    ) -> None:
        user_profile = self.example_user("iago")
        self.assertEqual(user_profile.role, UserProfile.ROLE_REALM_ADMINISTRATOR)
        self.login("iago")

        new_stream = self.make_stream("new", invite_only=True)
        msg_id = self.send_stream_message(user_profile, "Verona", topic_name="test123")

        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "message_id": msg_id,
                "stream_id": new_stream.id,
                "propagate_mode": "change_all",
            },
        )

        self.assert_json_error(result, "Invalid stream id")

    def test_move_message_realm_admin_cant_move_from_private_stream_without_subscription(
        self,
    ) -> None:
        user_profile = self.example_user("iago")
        self.assertEqual(user_profile.role, UserProfile.ROLE_REALM_ADMINISTRATOR)
        self.login("iago")

        self.make_stream("privatestream", invite_only=True)
        self.subscribe(user_profile, "privatestream")
        msg_id = self.send_stream_message(user_profile, "privatestream", topic_name="test123")
        self.unsubscribe(user_profile, "privatestream")

        verona = get_stream("Verona", user_profile.realm)

        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "message_id": msg_id,
                "stream_id": verona.id,
                "propagate_mode": "change_all",
            },
        )

        self.assert_json_error(
            result,
            "You don't have permission to move this message due to missing access to its stream",
        )

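    # Moving messages out of a private stream with protected history should only
    # move the messages the acting user can actually access; messages sent while
    # the user was unsubscribed must stay behind in the private stream.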
    def test_move_message_from_private_stream_message_access_checks(
        self,
    ) -> None:
        hamlet = self.example_user("hamlet")
        user_profile = self.example_user("iago")
        self.assertEqual(user_profile.role, UserProfile.ROLE_REALM_ADMINISTRATOR)
        self.login("iago")

        private_stream = self.make_stream(
            "privatestream", invite_only=True, history_public_to_subscribers=False
        )
        self.subscribe(hamlet, "privatestream")
        original_msg_id = self.send_stream_message(hamlet, "privatestream", topic_name="test123")
        self.subscribe(user_profile, "privatestream")
        new_msg_id = self.send_stream_message(user_profile, "privatestream", topic_name="test123")

        # Now we unsub and hamlet sends a new message (we won't have access to it even after re-subbing!)
        self.unsubscribe(user_profile, "privatestream")
        new_inaccessible_msg_id = self.send_stream_message(
            hamlet, "privatestream", topic_name="test123"
        )

        # Re-subscribe and send another message:
        self.subscribe(user_profile, "privatestream")
        newest_msg_id = self.send_stream_message(
            user_profile, "privatestream", topic_name="test123"
        )

        verona = get_stream("Verona", user_profile.realm)

        result = self.client_patch(
            "/json/messages/" + str(new_msg_id),
            {
                "message_id": new_msg_id,
                "stream_id": verona.id,
                "propagate_mode": "change_all",
            },
        )

        self.assert_json_success(result)
        self.assertEqual(Message.objects.get(id=new_msg_id).recipient_id, verona.recipient_id)
        self.assertEqual(Message.objects.get(id=newest_msg_id).recipient_id, verona.recipient_id)
        # The original message and the new, inaccessible message weren't moved,
        # because user_profile doesn't have access to them.
        self.assertEqual(
            Message.objects.get(id=original_msg_id).recipient_id, private_stream.recipient_id
        )
        self.assertEqual(
            Message.objects.get(id=new_inaccessible_msg_id).recipient_id,
            private_stream.recipient_id,
        )

    def test_move_message_cant_move_private_message(
        self,
    ) -> None:
        user_profile = self.example_user("iago")
        self.assertEqual(user_profile.role, UserProfile.ROLE_REALM_ADMINISTRATOR)
        self.login("iago")

        hamlet = self.example_user("hamlet")
        msg_id = self.send_personal_message(user_profile, hamlet)

        verona = get_stream("Verona", user_profile.realm)

        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "message_id": msg_id,
                "stream_id": verona.id,
                "propagate_mode": "change_all",
            },
        )

        self.assert_json_error(result, "Message must be a stream message")

    def test_move_message_to_stream_change_later(self) -> None:
        (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
            "iago", "test move stream", "new stream", "test"
        )

        result = self.client_patch(
            "/json/messages/" + str(msg_id_later),
            {
                "message_id": msg_id_later,
                "stream_id": new_stream.id,
                "propagate_mode": "change_later",
            },
        )
        self.assert_json_success(result)

        messages = get_topic_messages(user_profile, old_stream, "test")
        self.assert_length(messages, 2)
        self.assertEqual(messages[0].id, msg_id)
        self.assertEqual(
            messages[1].content,
            f"This topic was moved by @_**Iago|{user_profile.id}** to #**new stream>test**",
        )

        messages = get_topic_messages(user_profile, new_stream, "test")
        self.assert_length(messages, 3)
        self.assertEqual(messages[0].id, msg_id_later)
        self.assertEqual(
            messages[2].content,
            f"This topic was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**",
        )

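    # Exercises move_messages_between_streams_policy end to end: for each policy
    # value, a user whose role is just below the threshold must get an error and
    # a user at the threshold must succeed.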
    def test_move_message_between_streams_policy_setting(self) -> None:
        (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
            "othello", "old_stream_1", "new_stream_1", "test"
        )

        def check_move_message_according_to_policy(role: int, expect_fail: bool = False) -> None:
            do_change_user_role(user_profile, role, acting_user=None)

            result = self.client_patch(
                "/json/messages/" + str(msg_id),
                {
                    "message_id": msg_id,
                    "stream_id": new_stream.id,
                    "propagate_mode": "change_all",
                },
            )

            if expect_fail:
                self.assert_json_error(result, "You don't have permission to move this message")
                messages = get_topic_messages(user_profile, old_stream, "test")
                self.assert_length(messages, 3)
                messages = get_topic_messages(user_profile, new_stream, "test")
                self.assert_length(messages, 0)
            else:
                self.assert_json_success(result)
                messages = get_topic_messages(user_profile, old_stream, "test")
                self.assert_length(messages, 1)
                messages = get_topic_messages(user_profile, new_stream, "test")
                self.assert_length(messages, 4)

        # Check moving messages when policy is Realm.POLICY_ADMINS_ONLY.
        do_set_realm_property(
            user_profile.realm,
            "move_messages_between_streams_policy",
            Realm.POLICY_ADMINS_ONLY,
            acting_user=None,
        )
        check_move_message_according_to_policy(UserProfile.ROLE_MODERATOR, expect_fail=True)
        check_move_message_according_to_policy(UserProfile.ROLE_REALM_ADMINISTRATOR)

        (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
            "othello", "old_stream_2", "new_stream_2", "test"
        )
        # Check moving messages when policy is Realm.POLICY_MODERATORS_ONLY.
        do_set_realm_property(
            user_profile.realm,
            "move_messages_between_streams_policy",
            Realm.POLICY_MODERATORS_ONLY,
            acting_user=None,
        )
        check_move_message_according_to_policy(UserProfile.ROLE_MEMBER, expect_fail=True)
        check_move_message_according_to_policy(UserProfile.ROLE_MODERATOR)

        (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
            "othello", "old_stream_3", "new_stream_3", "test"
        )
        # Check moving messages when policy is Realm.POLICY_FULL_MEMBERS_ONLY.
        do_set_realm_property(
            user_profile.realm,
            "move_messages_between_streams_policy",
            Realm.POLICY_FULL_MEMBERS_ONLY,
            acting_user=None,
        )
        do_set_realm_property(
            user_profile.realm, "waiting_period_threshold", 100000, acting_user=None
        )
        check_move_message_according_to_policy(UserProfile.ROLE_MEMBER, expect_fail=True)

        do_set_realm_property(user_profile.realm, "waiting_period_threshold", 0, acting_user=None)
        check_move_message_according_to_policy(UserProfile.ROLE_MEMBER)

        (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
            "othello", "old_stream_4", "new_stream_4", "test"
        )
        # Check moving messages when policy is Realm.POLICY_MEMBERS_ONLY.
        do_set_realm_property(
            user_profile.realm,
            "move_messages_between_streams_policy",
            Realm.POLICY_MEMBERS_ONLY,
            acting_user=None,
        )
        check_move_message_according_to_policy(UserProfile.ROLE_GUEST, expect_fail=True)
        check_move_message_according_to_policy(UserProfile.ROLE_MEMBER)

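    # Even when the realm-level move policy allows it, the target stream's
    # stream_post_policy still applies: users who may not post to the new stream
    # may not move messages into it either.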
    def test_move_message_to_stream_based_on_stream_post_policy(self) -> None:
        (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
            "othello", "old_stream_1", "new_stream_1", "test"
        )
        do_set_realm_property(
            user_profile.realm,
            "move_messages_between_streams_policy",
            Realm.POLICY_MEMBERS_ONLY,
            acting_user=None,
        )

        def check_move_message_to_stream(role: int, error_msg: Optional[str] = None) -> None:
            do_change_user_role(user_profile, role, acting_user=None)

            result = self.client_patch(
                "/json/messages/" + str(msg_id),
                {
                    "message_id": msg_id,
                    "stream_id": new_stream.id,
                    "propagate_mode": "change_all",
                },
            )

            if error_msg is not None:
                self.assert_json_error(result, error_msg)
                messages = get_topic_messages(user_profile, old_stream, "test")
                self.assert_length(messages, 3)
                messages = get_topic_messages(user_profile, new_stream, "test")
                self.assert_length(messages, 0)
            else:
                self.assert_json_success(result)
                messages = get_topic_messages(user_profile, old_stream, "test")
                self.assert_length(messages, 1)
                messages = get_topic_messages(user_profile, new_stream, "test")
                self.assert_length(messages, 4)

        # Check when stream_post_policy is STREAM_POST_POLICY_ADMINS.
        do_change_stream_post_policy(new_stream, Stream.STREAM_POST_POLICY_ADMINS)
        error_msg = "Only organization administrators can send to this stream."
        check_move_message_to_stream(UserProfile.ROLE_MODERATOR, error_msg)
        check_move_message_to_stream(UserProfile.ROLE_REALM_ADMINISTRATOR)

        (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
            "othello", "old_stream_2", "new_stream_2", "test"
        )

        # Check when stream_post_policy is STREAM_POST_POLICY_MODERATORS.
        do_change_stream_post_policy(new_stream, Stream.STREAM_POST_POLICY_MODERATORS)
        error_msg = "Only organization administrators and moderators can send to this stream."
        check_move_message_to_stream(UserProfile.ROLE_MEMBER, error_msg)
        check_move_message_to_stream(UserProfile.ROLE_MODERATOR)

        (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
            "othello", "old_stream_3", "new_stream_3", "test"
        )

        # Check when stream_post_policy is STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS.
        do_change_stream_post_policy(new_stream, Stream.STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS)
        error_msg = "New members cannot send to this stream."

        do_set_realm_property(
            user_profile.realm, "waiting_period_threshold", 100000, acting_user=None
        )
        check_move_message_to_stream(UserProfile.ROLE_MEMBER, error_msg)

        do_set_realm_property(user_profile.realm, "waiting_period_threshold", 0, acting_user=None)
        check_move_message_to_stream(UserProfile.ROLE_MEMBER)

        (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
            "othello", "old_stream_4", "new_stream_4", "test"
        )

        # Check when stream_post_policy is STREAM_POST_POLICY_EVERYONE.
        # Guests are still not allowed here, since we never allow guests to move
        # messages between streams, so the new stream's stream_post_policy does
        # not matter for them.
        do_change_stream_post_policy(new_stream, Stream.STREAM_POST_POLICY_EVERYONE)
        do_set_realm_property(
            user_profile.realm, "waiting_period_threshold", 100000, acting_user=None
        )
        check_move_message_to_stream(
            UserProfile.ROLE_GUEST, "You don't have permission to move this message"
        )
        check_move_message_to_stream(UserProfile.ROLE_MEMBER)

    def test_move_message_to_stream_with_content(self) -> None:
        (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
            "iago", "test move stream", "new stream", "test"
        )

        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "message_id": msg_id,
                "stream_id": new_stream.id,
                "propagate_mode": "change_all",
                "content": "Not allowed",
            },
        )
        self.assert_json_error(result, "Cannot change message content while changing stream")

        messages = get_topic_messages(user_profile, old_stream, "test")
        self.assert_length(messages, 3)

        messages = get_topic_messages(user_profile, new_stream, "test")
        self.assert_length(messages, 0)

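    # Moves a topic to a different stream and renames it in a single request; the
    # test also pins the number of database queries and cache lookups the
    # operation is allowed to make.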
    def test_move_message_to_stream_and_topic(self) -> None:
        (user_profile, old_stream, new_stream, msg_id, msg_id_later) = self.prepare_move_topics(
            "iago", "test move stream", "new stream", "test"
        )

        with queries_captured() as queries, cache_tries_captured() as cache_tries:
            result = self.client_patch(
                "/json/messages/" + str(msg_id),
                {
                    "message_id": msg_id,
                    "stream_id": new_stream.id,
                    "propagate_mode": "change_all",
                    "topic": "new topic",
                },
            )
        self.assert_length(queries, 52)
        self.assert_length(cache_tries, 13)

        messages = get_topic_messages(user_profile, old_stream, "test")
        self.assert_length(messages, 1)
        self.assertEqual(
            messages[0].content,
            f"This topic was moved by @_**Iago|{user_profile.id}** to #**new stream>new topic**",
        )

        messages = get_topic_messages(user_profile, new_stream, "new topic")
        self.assert_length(messages, 4)
        self.assertEqual(
            messages[3].content,
            f"This topic was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**",
        )
        self.assert_json_success(result)

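    # Verifies has_message_access() before and after a topic is moved to a stream
    # the guest user is not subscribed to: the guest loses access, while a regular
    # member and an administrator keep it.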
    def test_inaccessible_msg_after_stream_change(self) -> None:
        """Simulates the case where a message is moved to a stream the user is not subscribed to."""
        (user_profile, old_stream, new_stream, msg_id, msg_id_lt) = self.prepare_move_topics(
            "iago", "test move stream", "new stream", "test"
        )

        guest_user = self.example_user("polonius")
        non_guest_user = self.example_user("hamlet")
        self.subscribe(guest_user, old_stream.name)
        self.subscribe(non_guest_user, old_stream.name)

        msg_id_to_test_access = self.send_stream_message(
            user_profile, old_stream.name, topic_name="test", content="fourth"
        )

        self.assertEqual(
            has_message_access(
                guest_user, Message.objects.get(id=msg_id_to_test_access), has_user_message=False
            ),
            True,
        )
        self.assertEqual(
            has_message_access(
                guest_user,
                Message.objects.get(id=msg_id_to_test_access),
                has_user_message=False,
                stream=old_stream,
            ),
            True,
        )
        self.assertEqual(
            has_message_access(
                non_guest_user,
                Message.objects.get(id=msg_id_to_test_access),
                has_user_message=False,
            ),
            True,
        )

        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "message_id": msg_id,
                "stream_id": new_stream.id,
                "propagate_mode": "change_all",
                "topic": "new topic",
            },
        )
        self.assert_json_success(result)

        self.assertEqual(
            has_message_access(
                guest_user,
                Message.objects.get(id=msg_id_to_test_access),
                has_user_message=False,
            ),
            False,
        )
        self.assertEqual(
            has_message_access(
                non_guest_user,
                Message.objects.get(id=msg_id_to_test_access),
                has_user_message=False,
            ),
            True,
        )
        self.assertEqual(
            # If the guest user were subscribed to the new stream,
            # they'd have access; has_message_access does not validate
            # the is_subscribed parameter.
            has_message_access(
                guest_user,
                Message.objects.get(id=msg_id_to_test_access),
                has_user_message=False,
                stream=new_stream,
                is_subscribed=True,
            ),
            True,
        )

        self.assertEqual(
            has_message_access(
                guest_user,
                Message.objects.get(id=msg_id_to_test_access),
                has_user_message=False,
                stream=new_stream,
            ),
            False,
        )
        with self.assertRaises(AssertionError):
            # Raises an assertion if you pass a stream that doesn't match the message.
            has_message_access(
                guest_user,
                Message.objects.get(id=msg_id_to_test_access),
                has_user_message=False,
                stream=old_stream,
            )

        self.assertEqual(
            UserMessage.objects.filter(
                user_profile_id=non_guest_user.id,
                message_id=msg_id_to_test_access,
            ).count(),
            0,
        )
        self.assertEqual(
            has_message_access(
                self.example_user("iago"),
                Message.objects.get(id=msg_id_to_test_access),
                has_user_message=False,
            ),
            True,
        )

    def test_no_notify_move_message_to_stream(self) -> None:
        (user_profile, old_stream, new_stream, msg_id, msg_id_lt) = self.prepare_move_topics(
            "iago", "test move stream", "new stream", "test"
        )

        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "message_id": msg_id,
                "stream_id": new_stream.id,
                "propagate_mode": "change_all",
                "send_notification_to_old_thread": "false",
                "send_notification_to_new_thread": "false",
            },
        )

        self.assert_json_success(result)

        messages = get_topic_messages(user_profile, old_stream, "test")
        self.assert_length(messages, 0)

        messages = get_topic_messages(user_profile, new_stream, "test")
        self.assert_length(messages, 3)

    def test_notify_new_thread_move_message_to_stream(self) -> None:
        (user_profile, old_stream, new_stream, msg_id, msg_id_lt) = self.prepare_move_topics(
            "iago", "test move stream", "new stream", "test"
        )

        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "message_id": msg_id,
                "stream_id": new_stream.id,
                "propagate_mode": "change_all",
                "send_notification_to_old_thread": "false",
                "send_notification_to_new_thread": "true",
            },
        )

        self.assert_json_success(result)

        messages = get_topic_messages(user_profile, old_stream, "test")
        self.assert_length(messages, 0)

        messages = get_topic_messages(user_profile, new_stream, "test")
        self.assert_length(messages, 4)
        self.assertEqual(
            messages[3].content,
            f"This topic was moved here from #**test move stream>test** by @_**Iago|{user_profile.id}**",
        )

    def test_notify_old_thread_move_message_to_stream(self) -> None:
        (user_profile, old_stream, new_stream, msg_id, msg_id_lt) = self.prepare_move_topics(
            "iago", "test move stream", "new stream", "test"
        )

        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "message_id": msg_id,
                "stream_id": new_stream.id,
                "propagate_mode": "change_all",
                "send_notification_to_old_thread": "true",
                "send_notification_to_new_thread": "false",
            },
        )

        self.assert_json_success(result)

        messages = get_topic_messages(user_profile, old_stream, "test")
        self.assert_length(messages, 1)
        self.assertEqual(
            messages[0].content,
            f"This topic was moved by @_**Iago|{user_profile.id}** to #**new stream>test**",
        )

        messages = get_topic_messages(user_profile, new_stream, "test")
        self.assert_length(messages, 3)

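    # Shared driver for the private-stream move tests below: moves a topic between
    # streams with the given privacy settings and checks which UserMessage rows
    # are created or deleted for the subscribers who gain or lose access.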
    def parameterized_test_move_message_involving_private_stream(
        self,
        from_invite_only: bool,
        history_public_to_subscribers: bool,
        user_messages_created: bool,
        to_invite_only: bool = True,
    ) -> None:
        admin_user = self.example_user("iago")
        user_losing_access = self.example_user("cordelia")
        user_gaining_access = self.example_user("hamlet")

        self.login("iago")
        old_stream = self.make_stream("test move stream", invite_only=from_invite_only)
        new_stream = self.make_stream(
            "new stream",
            invite_only=to_invite_only,
            history_public_to_subscribers=history_public_to_subscribers,
        )

        self.subscribe(admin_user, old_stream.name)
        self.subscribe(user_losing_access, old_stream.name)

        self.subscribe(admin_user, new_stream.name)
        self.subscribe(user_gaining_access, new_stream.name)

        msg_id = self.send_stream_message(
            admin_user, old_stream.name, topic_name="test", content="First"
        )
        self.send_stream_message(admin_user, old_stream.name, topic_name="test", content="Second")

        self.assertEqual(
            UserMessage.objects.filter(
                user_profile_id=user_losing_access.id,
                message_id=msg_id,
            ).count(),
            1,
        )
        self.assertEqual(
            UserMessage.objects.filter(
                user_profile_id=user_gaining_access.id,
                message_id=msg_id,
            ).count(),
            0,
        )

        result = self.client_patch(
            "/json/messages/" + str(msg_id),
            {
                "message_id": msg_id,
                "stream_id": new_stream.id,
                "propagate_mode": "change_all",
            },
        )
        self.assert_json_success(result)

        messages = get_topic_messages(admin_user, old_stream, "test")
        self.assert_length(messages, 1)
        self.assertEqual(
            messages[0].content,
            f"This topic was moved by @_**Iago|{admin_user.id}** to #**new stream>test**",
        )

        messages = get_topic_messages(admin_user, new_stream, "test")
        self.assert_length(messages, 3)

        self.assertEqual(
            UserMessage.objects.filter(
                user_profile_id=user_losing_access.id,
                message_id=msg_id,
            ).count(),
            0,
        )
        # When the history is shared, UserMessage is not created for the user but the user
        # can see the message.
        self.assertEqual(
            UserMessage.objects.filter(
                user_profile_id=user_gaining_access.id,
                message_id=msg_id,
            ).count(),
            1 if user_messages_created else 0,
        )

    def test_move_message_from_public_to_private_stream_not_shared_history(self) -> None:
        self.parameterized_test_move_message_involving_private_stream(
            from_invite_only=False,
            history_public_to_subscribers=False,
            user_messages_created=True,
        )

    def test_move_message_from_public_to_private_stream_shared_history(self) -> None:
        self.parameterized_test_move_message_involving_private_stream(
            from_invite_only=False,
            history_public_to_subscribers=True,
            user_messages_created=False,
        )

    def test_move_message_from_private_to_private_stream_not_shared_history(self) -> None:
        self.parameterized_test_move_message_involving_private_stream(
            from_invite_only=True,
            history_public_to_subscribers=False,
            user_messages_created=True,
        )

    def test_move_message_from_private_to_private_stream_shared_history(self) -> None:
        self.parameterized_test_move_message_involving_private_stream(
            from_invite_only=True,
            history_public_to_subscribers=True,
            user_messages_created=False,
        )

    def test_move_message_from_private_to_public(self) -> None:
        self.parameterized_test_move_message_involving_private_stream(
            from_invite_only=True,
            history_public_to_subscribers=True,
            user_messages_created=False,
            to_invite_only=False,
        )

    def test_can_move_messages_between_streams(self) -> None:
        def validation_func(user_profile: UserProfile) -> bool:
            user_profile.refresh_from_db()
            return user_profile.can_move_messages_between_streams()

        self.check_has_permission_policies("move_messages_between_streams_policy", validation_func)


class DeleteMessageTest(ZulipTestCase):
    def test_delete_message_invalid_request_format(self) -> None:
        self.login("iago")
        hamlet = self.example_user("hamlet")
        msg_id = self.send_stream_message(hamlet, "Scotland")
        result = self.client_delete(f"/json/messages/{msg_id + 1}", {"message_id": msg_id})
        self.assert_json_error(result, "Invalid message(s)")
        result = self.client_delete(f"/json/messages/{msg_id}")
        self.assert_json_success(result)

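    # Covers the realm's allow_message_deleting and
    # message_content_delete_limit_seconds settings for message owners, other
    # users, and administrators, including the double-delete race from issue #11219.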
    def test_delete_message_by_user(self) -> None:
        def set_message_deleting_params(
            allow_message_deleting: bool, message_content_delete_limit_seconds: int
        ) -> None:
            self.login("iago")
            result = self.client_patch(
                "/json/realm",
                {
                    "allow_message_deleting": orjson.dumps(allow_message_deleting).decode(),
                    "message_content_delete_limit_seconds": message_content_delete_limit_seconds,
                },
            )
            self.assert_json_success(result)

        def test_delete_message_by_admin(msg_id: int) -> HttpResponse:
            self.login("iago")
            result = self.client_delete(f"/json/messages/{msg_id}")
            return result

        def test_delete_message_by_owner(msg_id: int) -> HttpResponse:
            self.login("hamlet")
            result = self.client_delete(f"/json/messages/{msg_id}")
            return result

        def test_delete_message_by_other_user(msg_id: int) -> HttpResponse:
            self.login("cordelia")
            result = self.client_delete(f"/json/messages/{msg_id}")
            return result

        # Test when message deleting is not allowed (the default).
        set_message_deleting_params(False, 0)
        hamlet = self.example_user("hamlet")
        self.login_user(hamlet)
        msg_id = self.send_stream_message(hamlet, "Scotland")

        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_error(result, "You don't have permission to delete this message")

        result = test_delete_message_by_other_user(msg_id=msg_id)
        self.assert_json_error(result, "You don't have permission to delete this message")

        result = test_delete_message_by_admin(msg_id=msg_id)
        self.assert_json_success(result)

        # Test when message deleting is allowed.
        # Test when the time limit is zero (no limit).
        set_message_deleting_params(True, 0)
        msg_id = self.send_stream_message(hamlet, "Scotland")
        message = Message.objects.get(id=msg_id)
        message.date_sent = message.date_sent - datetime.timedelta(seconds=600)
        message.save()

        result = test_delete_message_by_other_user(msg_id=msg_id)
        self.assert_json_error(result, "You don't have permission to delete this message")

        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_success(result)

        # Test when the time limit is non-zero.
        set_message_deleting_params(True, 240)
        msg_id_1 = self.send_stream_message(hamlet, "Scotland")
        message = Message.objects.get(id=msg_id_1)
        message.date_sent = message.date_sent - datetime.timedelta(seconds=120)
        message.save()

        msg_id_2 = self.send_stream_message(hamlet, "Scotland")
        message = Message.objects.get(id=msg_id_2)
        message.date_sent = message.date_sent - datetime.timedelta(seconds=360)
        message.save()

        result = test_delete_message_by_other_user(msg_id=msg_id_1)
        self.assert_json_error(result, "You don't have permission to delete this message")

        result = test_delete_message_by_owner(msg_id=msg_id_1)
        self.assert_json_success(result)
        result = test_delete_message_by_owner(msg_id=msg_id_2)
        self.assert_json_error(result, "The time limit for deleting this message has passed")

        # No limit for the admin.
        result = test_delete_message_by_admin(msg_id=msg_id_2)
        self.assert_json_success(result)

        # Test multiple delete requests with no latency issues.
        msg_id = self.send_stream_message(hamlet, "Scotland")
        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_success(result)
        result = test_delete_message_by_owner(msg_id=msg_id)
        self.assert_json_error(result, "Invalid message(s)")

        # Test handling of the 500 error caused by multiple delete requests racing
        # due to latency; see issue #11219.
        with mock.patch("zerver.views.message_edit.do_delete_messages") as m, mock.patch(
            "zerver.views.message_edit.validate_can_delete_message", return_value=None
        ), mock.patch("zerver.views.message_edit.access_message", return_value=(None, None)):
            m.side_effect = IntegrityError()
            result = test_delete_message_by_owner(msg_id=msg_id)
            self.assert_json_error(result, "Message already deleted")
            m.side_effect = Message.DoesNotExist()
            result = test_delete_message_by_owner(msg_id=msg_id)
            self.assert_json_error(result, "Message already deleted")