2022-11-01 10:00:38 +01:00
|
|
|
from typing import TYPE_CHECKING, Any, List, Mapping, Set
|
2020-06-11 00:54:34 +02:00
|
|
|
from unittest import mock
|
2017-09-01 13:15:32 +02:00
|
|
|
|
2020-08-07 01:09:47 +02:00
|
|
|
import orjson
|
2022-08-14 12:02:05 +02:00
|
|
|
from django.db import connection, transaction
|
2016-06-03 07:59:00 +02:00
|
|
|
|
2022-04-14 23:54:53 +02:00
|
|
|
from zerver.actions.message_flags import do_update_message_flags
|
2022-04-14 23:51:16 +02:00
|
|
|
from zerver.actions.streams import do_change_stream_permission
|
2023-03-03 18:00:27 +01:00
|
|
|
from zerver.actions.user_topics import do_mute_topic
|
2020-06-18 09:23:13 +02:00
|
|
|
from zerver.lib.fix_unreads import fix, fix_unsubscribed
|
2020-06-29 13:19:17 +02:00
|
|
|
from zerver.lib.message import (
|
2021-06-09 13:31:39 +02:00
|
|
|
MessageDetailsDict,
|
2020-06-29 13:19:17 +02:00
|
|
|
MessageDict,
|
2021-06-09 13:31:39 +02:00
|
|
|
RawUnreadMessagesResult,
|
|
|
|
RawUnreadPrivateMessageDict,
|
2020-06-29 13:19:17 +02:00
|
|
|
UnreadMessagesResult,
|
2021-06-09 13:31:39 +02:00
|
|
|
add_message_to_unread_msgs,
|
2020-06-29 13:19:17 +02:00
|
|
|
aggregate_unread_data,
|
|
|
|
apply_unread_message_event,
|
2020-07-08 00:35:59 +02:00
|
|
|
bulk_access_messages,
|
2021-06-09 13:31:39 +02:00
|
|
|
format_unread_message_details,
|
2020-06-29 13:19:17 +02:00
|
|
|
get_raw_unread_data,
|
|
|
|
)
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.test_classes import ZulipTestCase
|
2022-11-01 10:00:38 +01:00
|
|
|
from zerver.lib.test_helpers import get_subscription, timeout_mock
|
2022-11-17 09:30:48 +01:00
|
|
|
from zerver.lib.timeout import TimeoutExpiredError
|
2020-06-29 13:19:17 +02:00
|
|
|
from zerver.models import (
|
|
|
|
Message,
|
|
|
|
Recipient,
|
|
|
|
Stream,
|
|
|
|
Subscription,
|
|
|
|
UserMessage,
|
|
|
|
UserProfile,
|
|
|
|
get_realm,
|
|
|
|
get_stream,
|
|
|
|
)
|
2017-09-01 13:15:32 +02:00
|
|
|
|
2022-06-08 04:52:09 +02:00
|
|
|
if TYPE_CHECKING:
|
|
|
|
from django.test.client import _MonkeyPatchedWSGIResponse as TestHttpResponse
|
|
|
|
|
2014-02-07 23:10:14 +01:00
|
|
|
|
2020-07-09 15:48:10 +02:00
|
|
|
def check_flags(flags: List[str], expected: Set[str]) -> None:
    """
    Assert that a message's flags match the expected set, ignoring
    the has_alert_word flag.

    The has_alert_word flag can be ignored for most tests.
    """
    # Callers must not expect has_alert_word explicitly; it is always ignored.
    assert "has_alert_word" not in expected
    actual = {flag for flag in flags if flag != "has_alert_word"}
    if actual != expected:
        raise AssertionError(f"expected flags (ignoring has_alert_word) to be {expected}")
|
2020-07-09 15:48:10 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-06-26 19:51:10 +02:00
|
|
|
class FirstUnreadAnchorTests(ZulipTestCase):
    """
    HISTORICAL NOTE:

    The two tests in this class were originally written when
    we had the concept of a "pointer", and they may be a bit
    redundant in what they now check.
    """

    def test_use_first_unread_anchor(self) -> None:
        """Fetching messages anchored at 'first_unread' returns the oldest
        unread message, both via the modern anchor="first_unread" spelling
        and the legacy use_first_unread_anchor=True parameter."""
        self.login("hamlet")

        # Mark all existing messages as read
        with timeout_mock("zerver.views.message_flags"):
            result = self.client_post("/json/mark_all_as_read")
        self.assert_json_success(result)

        # Send a new message (this will be unread)
        new_message_id = self.send_stream_message(self.example_user("othello"), "Verona", "test")

        # If we call get_messages with use_first_unread_anchor=True, we
        # should get the message we just sent
        messages_response = self.get_messages_response(
            anchor="first_unread", num_before=0, num_after=1
        )
        self.assertEqual(messages_response["messages"][0]["id"], new_message_id)
        self.assertEqual(messages_response["anchor"], new_message_id)

        # Test with the old way of expressing use_first_unread_anchor=True
        messages_response = self.get_messages_response(
            anchor=0, num_before=0, num_after=1, use_first_unread_anchor=True
        )
        self.assertEqual(messages_response["messages"][0]["id"], new_message_id)
        self.assertEqual(messages_response["anchor"], new_message_id)

        # We want to get the message_id of an arbitrary old message. We can
        # call get_messages with use_first_unread_anchor=False and simply
        # save the first message we're returned.
        messages = self.get_messages(
            anchor=0, num_before=0, num_after=2, use_first_unread_anchor=False
        )
        old_message_id = messages[0]["id"]

        # Verify the message is marked as read
        user_message = UserMessage.objects.get(
            message_id=old_message_id, user_profile=self.example_user("hamlet")
        )
        self.assertTrue(user_message.flags.read)

        # Let's set this old message to be unread
        result = self.client_post(
            "/json/messages/flags",
            {"messages": orjson.dumps([old_message_id]).decode(), "op": "remove", "flag": "read"},
        )

        # Verify it's now marked as unread
        user_message = UserMessage.objects.get(
            message_id=old_message_id, user_profile=self.example_user("hamlet")
        )
        self.assert_json_success(result)
        self.assertFalse(user_message.flags.read)

        # Now if we call get_messages with use_first_unread_anchor=True,
        # we should get the old message we just set to unread
        messages_response = self.get_messages_response(
            anchor="first_unread", num_before=0, num_after=1
        )
        self.assertEqual(messages_response["messages"][0]["id"], old_message_id)
        self.assertEqual(messages_response["anchor"], old_message_id)

    def test_visible_messages_use_first_unread_anchor(self) -> None:
        """The 'first_unread' anchor interacts with the realm's
        first-visible-message cutoff (mocked here via
        zerver.lib.narrow.get_first_visible_message_id): messages below the
        cutoff are not returned."""
        self.login("hamlet")

        # Start from a clean slate: everything read, then one fresh unread.
        with timeout_mock("zerver.views.message_flags"):
            result = self.client_post("/json/mark_all_as_read")
        self.assert_json_success(result)

        new_message_id = self.send_stream_message(self.example_user("othello"), "Verona", "test")

        messages_response = self.get_messages_response(
            anchor="first_unread", num_before=0, num_after=1
        )
        self.assertEqual(messages_response["messages"][0]["id"], new_message_id)
        self.assertEqual(messages_response["anchor"], new_message_id)

        # Cutoff exactly at the unread message: it is still visible.
        with mock.patch(
            "zerver.lib.narrow.get_first_visible_message_id", return_value=new_message_id
        ):
            messages_response = self.get_messages_response(
                anchor="first_unread", num_before=0, num_after=1
            )
        self.assertEqual(messages_response["messages"][0]["id"], new_message_id)
        self.assertEqual(messages_response["anchor"], new_message_id)

        # Cutoff just above the unread message: nothing is returned,
        # though the response still carries an anchor field.
        with mock.patch(
            "zerver.lib.narrow.get_first_visible_message_id",
            return_value=new_message_id + 1,
        ):
            messages_response = self.get_messages_response(
                anchor="first_unread", num_before=0, num_after=1
            )
        self.assert_length(messages_response["messages"], 0)
        self.assertIn("anchor", messages_response)

        # Cutoff just below the unread message: it is visible again.
        with mock.patch(
            "zerver.lib.narrow.get_first_visible_message_id",
            return_value=new_message_id - 1,
        ):
            messages = self.get_messages(anchor="first_unread", num_before=0, num_after=1)
        self.assert_length(messages, 1)
|
|
|
|
|
2020-06-26 19:51:10 +02:00
|
|
|
|
2016-08-23 02:08:42 +02:00
|
|
|
class UnreadCountTests(ZulipTestCase):
    """Tests for unread-count bookkeeping: the UserMessage read flag, the
    /json/messages/flags and /json/messages/flags/narrow endpoints, and the
    mark-stream/topic-as-read endpoints."""

    def setUp(self) -> None:
        # Create two unread personal messages (Iago -> Hamlet) that the
        # flag-update tests below operate on.  Push notifications are mocked
        # as enabled so that the notification code path is exercised.
        super().setUp()
        with mock.patch(
            "zerver.lib.push_notifications.push_notifications_enabled", return_value=True
        ) as mock_push_notifications_enabled:
            self.unread_msg_ids = [
                self.send_personal_message(
                    self.example_user("iago"), self.example_user("hamlet"), "hello"
                ),
                self.send_personal_message(
                    self.example_user("iago"), self.example_user("hamlet"), "hello2"
                ),
            ]
            mock_push_notifications_enabled.assert_called()

    # Sending a new message results in unread UserMessages being created
    def test_new_message(self) -> None:
        self.login("hamlet")
        content = "Test message for unset read bit"
        last_msg = self.send_stream_message(self.example_user("hamlet"), "Verona", content)
        user_messages = list(UserMessage.objects.filter(message=last_msg))
        self.assertGreater(len(user_messages), 0)
        for um in user_messages:
            self.assertEqual(um.message.content, content)
            # Everyone except the sender should see the message as unread.
            if um.user_profile.email != self.example_email("hamlet"):
                self.assertFalse(um.flags.read)

    def test_update_flags(self) -> None:
        """Adding and removing the 'read' flag via /json/messages/flags is
        reflected in the flags returned by get_messages."""
        self.login("hamlet")

        result = self.client_post(
            "/json/messages/flags",
            {"messages": orjson.dumps(self.unread_msg_ids).decode(), "op": "add", "flag": "read"},
        )
        self.assert_json_success(result)

        # Ensure we properly set the flags
        found = 0
        for msg in self.get_messages():
            if msg["id"] in self.unread_msg_ids:
                check_flags(msg["flags"], {"read"})
                found += 1
        self.assertEqual(found, 2)

        result = self.client_post(
            "/json/messages/flags",
            {
                "messages": orjson.dumps([self.unread_msg_ids[1]]).decode(),
                "op": "remove",
                "flag": "read",
            },
        )
        self.assert_json_success(result)

        # Ensure we properly remove just one flag
        for msg in self.get_messages():
            if msg["id"] == self.unread_msg_ids[0]:
                check_flags(msg["flags"], {"read"})
            elif msg["id"] == self.unread_msg_ids[1]:
                check_flags(msg["flags"], set())

    def test_update_flags_for_narrow(self) -> None:
        """The /json/messages/flags/narrow endpoint updates flags only for
        messages in the anchor window that match the narrow, and reports
        the processed range in its response."""
        user = self.example_user("hamlet")
        self.login_user(user)
        # Ten messages alternating between "topic 0" (even i) and
        # "topic 1" (odd i).
        message_ids = [
            self.send_stream_message(
                self.example_user("cordelia"), "Verona", topic_name=f"topic {i % 2}"
            )
            for i in range(10)
        ]

        # Empty narrow: a window of 2 before + anchor + 2 after around
        # message_ids[5] marks exactly message_ids[3:8] as read.
        response = self.assert_json_success(
            self.client_post(
                "/json/messages/flags/narrow",
                {
                    "anchor": message_ids[5],
                    "num_before": 2,
                    "num_after": 2,
                    "narrow": "[]",
                    "op": "add",
                    "flag": "read",
                },
            )
        )
        self.assertEqual(response["processed_count"], 5)
        self.assertEqual(response["updated_count"], 5)
        self.assertEqual(response["first_processed_id"], message_ids[3])
        self.assertEqual(response["last_processed_id"], message_ids[7])
        self.assertEqual(response["found_oldest"], False)
        self.assertEqual(response["found_newest"], False)
        self.assertCountEqual(
            UserMessage.objects.filter(user_profile_id=user.id, message_id__in=message_ids)
            .extra(where=[UserMessage.where_read()])
            .values_list("message_id", flat=True),
            message_ids[3:8],
        )

        # Narrow to "topic 1", excluding the anchor itself
        # (include_anchor=false with num_before=0).
        response = self.assert_json_success(
            self.client_post(
                "/json/messages/flags/narrow",
                {
                    "anchor": message_ids[3],
                    "include_anchor": "false",
                    "num_before": 0,
                    "num_after": 5,
                    "narrow": orjson.dumps(
                        [
                            {"operator": "stream", "operand": "Verona"},
                            {"operator": "topic", "operand": "topic 1"},
                        ]
                    ).decode(),
                    "op": "add",
                    "flag": "starred",
                },
            )
        )
        # In this topic (1, 3, 5, 7, 9), processes everything after 3.
        self.assertEqual(response["processed_count"], 3)
        self.assertEqual(response["updated_count"], 3)
        self.assertEqual(response["first_processed_id"], message_ids[5])
        self.assertEqual(response["last_processed_id"], message_ids[9])
        self.assertEqual(response["found_oldest"], False)
        self.assertEqual(response["found_newest"], True)
        self.assertCountEqual(
            UserMessage.objects.filter(user_profile_id=user.id, message_id__in=message_ids)
            .extra(where=[UserMessage.where_starred()])
            .values_list("message_id", flat=True),
            message_ids[5::2],
        )

    def test_update_flags_for_narrow_misuse(self) -> None:
        # include_anchor=false is only valid when the anchor sits at one
        # end of the requested range (num_before or num_after is 0).
        self.login("hamlet")

        response = self.client_post(
            "/json/messages/flags/narrow",
            {
                "anchor": "0",
                "include_anchor": "false",
                "num_before": "1",
                "num_after": "1",
                "narrow": "[]",
                "op": "add",
                "flag": "read",
            },
        )
        self.assert_json_error(response, "The anchor can only be excluded at an end of the range")

    def test_mark_all_in_stream_read(self) -> None:
        """Marking a stream as read updates only that stream's UserMessage
        rows for the requesting user and sends one update_message_flags
        event."""
        self.login("hamlet")
        user_profile = self.example_user("hamlet")
        stream = self.subscribe(user_profile, "test_stream")
        self.subscribe(self.example_user("cordelia"), "test_stream")

        message_id = self.send_stream_message(self.example_user("hamlet"), "test_stream", "hello")
        unrelated_message_id = self.send_stream_message(
            self.example_user("hamlet"), "Denmark", "hello"
        )

        events: List[Mapping[str, Any]] = []
        with self.tornado_redirected_to_list(events, expected_num_events=1):
            result = self.client_post(
                "/json/mark_stream_as_read",
                {
                    "stream_id": stream.id,
                },
            )

        self.assert_json_success(result)

        # Verify the single flag-update event carries the expected fields.
        event = events[0]["event"]
        expected = dict(
            operation="add",
            messages=[message_id],
            flag="read",
            type="update_message_flags",
            all=False,
        )

        differences = [key for key in expected if expected[key] != event[key]]
        self.assert_length(differences, 0)

        # Only hamlet's row for the stream message is marked read.
        hamlet = self.example_user("hamlet")
        um = list(UserMessage.objects.filter(message=message_id))
        for msg in um:
            if msg.user_profile.email == hamlet.email:
                self.assertTrue(msg.flags.read)
            else:
                self.assertFalse(msg.flags.read)

        # The message in the other stream (Denmark) is untouched.
        unrelated_messages = list(UserMessage.objects.filter(message=unrelated_message_id))
        for msg in unrelated_messages:
            if msg.user_profile.email == hamlet.email:
                self.assertFalse(msg.flags.read)

    def test_mark_all_in_invalid_stream_read(self) -> None:
        self.login("hamlet")
        invalid_stream_id = "12345678"
        result = self.client_post(
            "/json/mark_stream_as_read",
            {
                "stream_id": invalid_stream_id,
            },
        )
        self.assert_json_error(result, "Invalid stream ID")

    def test_mark_all_topics_unread_with_invalid_stream_name(self) -> None:
        self.login("hamlet")
        invalid_stream_id = "12345678"
        result = self.client_post(
            "/json/mark_topic_as_read",
            {
                "stream_id": invalid_stream_id,
                "topic_name": "whatever",
            },
        )
        self.assert_json_error(result, "Invalid stream ID")

    def test_mark_all_in_stream_topic_read(self) -> None:
        """Marking a topic as read updates only that topic's UserMessage
        rows for the requesting user and sends one update_message_flags
        event."""
        self.login("hamlet")
        user_profile = self.example_user("hamlet")
        self.subscribe(user_profile, "test_stream")

        message_id = self.send_stream_message(
            self.example_user("hamlet"), "test_stream", "hello", "test_topic"
        )
        unrelated_message_id = self.send_stream_message(
            self.example_user("hamlet"), "Denmark", "hello", "Denmark2"
        )
        events: List[Mapping[str, Any]] = []
        with self.tornado_redirected_to_list(events, expected_num_events=1):
            result = self.client_post(
                "/json/mark_topic_as_read",
                {
                    "stream_id": get_stream("test_stream", user_profile.realm).id,
                    "topic_name": "test_topic",
                },
            )

        self.assert_json_success(result)

        # Verify the single flag-update event carries the expected fields.
        event = events[0]["event"]
        expected = dict(
            operation="add",
            messages=[message_id],
            flag="read",
            type="update_message_flags",
            all=False,
        )

        differences = [key for key in expected if expected[key] != event[key]]
        self.assert_length(differences, 0)

        um = list(UserMessage.objects.filter(message=message_id))
        for msg in um:
            if msg.user_profile_id == user_profile.id:
                self.assertTrue(msg.flags.read)

        # The message in the other topic/stream is untouched.
        unrelated_messages = list(UserMessage.objects.filter(message=unrelated_message_id))
        for msg in unrelated_messages:
            if msg.user_profile_id == user_profile.id:
                self.assertFalse(msg.flags.read)

    def test_mark_all_in_invalid_topic_read(self) -> None:
        self.login("hamlet")
        invalid_topic_name = "abc"
        result = self.client_post(
            "/json/mark_topic_as_read",
            {
                "stream_id": get_stream("Denmark", get_realm("zulip")).id,
                "topic_name": invalid_topic_name,
            },
        )
        self.assert_json_error(result, "No such topic 'abc'")
|
2017-09-01 13:15:32 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-09-01 13:15:32 +02:00
|
|
|
class FixUnreadTests(ZulipTestCase):
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_fix_unreads(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
user = self.example_user("hamlet")
|
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
|
|
|
othello = self.example_user("othello")
|
2021-02-12 08:20:45 +01:00
|
|
|
realm = get_realm("zulip")
|
2017-09-01 13:15:32 +02:00
|
|
|
|
2018-05-10 19:00:29 +02:00
|
|
|
def send_message(stream_name: str, topic_name: str) -> int:
|
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
|
|
|
self.subscribe(othello, stream_name)
|
|
|
|
msg_id = self.send_stream_message(othello, stream_name, topic_name=topic_name)
|
2021-02-12 08:19:30 +01:00
|
|
|
um = UserMessage.objects.get(user_profile=user, message_id=msg_id)
|
2017-09-01 13:15:32 +02:00
|
|
|
return um.id
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def assert_read(user_message_id: int) -> None:
|
2017-09-01 13:15:32 +02:00
|
|
|
um = UserMessage.objects.get(id=user_message_id)
|
|
|
|
self.assertTrue(um.flags.read)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def assert_unread(user_message_id: int) -> None:
|
2017-09-01 13:15:32 +02:00
|
|
|
um = UserMessage.objects.get(id=user_message_id)
|
|
|
|
self.assertFalse(um.flags.read)
|
|
|
|
|
2018-05-10 19:00:29 +02:00
|
|
|
def mute_stream(stream_name: str) -> None:
    """Mute the named stream for `user` by flipping is_muted directly on
    the Subscription row (avoids going through the API)."""
    stream = get_stream(stream_name, realm)
    sub = Subscription.objects.get(
        user_profile=user,
        recipient=stream.recipient,
    )
    sub.is_muted = True
    sub.save()
|
|
|
|
|
2018-05-10 19:00:29 +02:00
|
|
|
def mute_topic(stream_name: str, topic_name: str) -> None:
    """Mute one topic in the named stream for `user` via the normal
    action-layer code path."""
    stream = get_stream(stream_name, realm)
    do_mute_topic(user, stream, topic_name)
|
|
|
|
|
2018-05-10 19:00:29 +02:00
|
|
|
def force_unsubscribe(stream_name: str) -> None:
    """
    Deactivate the subscription row directly, bypassing the normal
    unsubscribe code path.  We don't want side effects here, since the
    eventual unsubscribe path may mark messages as read, defeating the
    test setup here.
    """
    subscription = get_subscription(stream_name, user)
    subscription.active = False
    subscription.save()
|
|
|
|
|
|
|
|
# The data setup here is kind of funny, because some of these
|
|
|
|
# conditions should not actually happen in practice going forward,
|
|
|
|
# but we may have had bad data from the past.
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
mute_stream("Denmark")
|
|
|
|
mute_topic("Verona", "muted_topic")
|
2017-09-01 13:15:32 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
um_normal_id = send_message("Verona", "normal")
|
|
|
|
um_muted_topic_id = send_message("Verona", "muted_topic")
|
|
|
|
um_muted_stream_id = send_message("Denmark", "whatever")
|
2017-09-01 13:15:32 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
self.subscribe(user, "temporary")
|
|
|
|
um_unsubscribed_id = send_message("temporary", "whatever")
|
|
|
|
force_unsubscribe("temporary")
|
2017-09-01 13:15:32 +02:00
|
|
|
|
2020-06-18 09:23:13 +02:00
|
|
|
# Verify the setup
|
2017-09-01 13:15:32 +02:00
|
|
|
assert_unread(um_normal_id)
|
|
|
|
assert_unread(um_muted_topic_id)
|
|
|
|
assert_unread(um_muted_stream_id)
|
|
|
|
assert_unread(um_unsubscribed_id)
|
|
|
|
|
|
|
|
# fix unsubscribed
|
2021-02-12 08:19:30 +01:00
|
|
|
with connection.cursor() as cursor, self.assertLogs(
|
2021-02-12 08:20:45 +01:00
|
|
|
"zulip.fix_unreads", "INFO"
|
2021-02-12 08:19:30 +01:00
|
|
|
) as info_logs:
|
2017-09-01 13:15:32 +02:00
|
|
|
fix_unsubscribed(cursor, user)
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(info_logs.output[0], "INFO:zulip.fix_unreads:get recipients")
|
|
|
|
self.assertTrue("INFO:zulip.fix_unreads:[" in info_logs.output[1])
|
|
|
|
self.assertTrue("INFO:zulip.fix_unreads:elapsed time:" in info_logs.output[2])
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
|
|
|
info_logs.output[3],
|
2021-02-12 08:20:45 +01:00
|
|
|
"INFO:zulip.fix_unreads:finding unread messages for non-active streams",
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(info_logs.output[4], "INFO:zulip.fix_unreads:rows found: 1")
|
|
|
|
self.assertTrue("INFO:zulip.fix_unreads:elapsed time:" in info_logs.output[5])
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
|
|
|
info_logs.output[6],
|
2021-02-12 08:20:45 +01:00
|
|
|
"INFO:zulip.fix_unreads:fixing unread messages for non-active streams",
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertTrue("INFO:zulip.fix_unreads:elapsed time:" in info_logs.output[7])
|
2020-07-20 00:01:05 +02:00
|
|
|
|
2020-06-18 09:23:13 +02:00
|
|
|
# Muted messages don't change.
|
2017-09-01 13:15:32 +02:00
|
|
|
assert_unread(um_muted_topic_id)
|
|
|
|
assert_unread(um_muted_stream_id)
|
2020-06-18 09:23:13 +02:00
|
|
|
assert_unread(um_normal_id)
|
2017-09-01 13:15:32 +02:00
|
|
|
|
|
|
|
# The unsubscribed entry should change.
|
|
|
|
assert_read(um_unsubscribed_id)
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
with self.assertLogs("zulip.fix_unreads", "INFO") as info_logs:
|
2020-07-20 00:01:05 +02:00
|
|
|
# test idempotency
|
|
|
|
fix(user)
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(info_logs.output[0], f"INFO:zulip.fix_unreads:\n---\nFixing {user.id}:")
|
|
|
|
self.assertEqual(info_logs.output[1], "INFO:zulip.fix_unreads:get recipients")
|
|
|
|
self.assertTrue("INFO:zulip.fix_unreads:[" in info_logs.output[2])
|
|
|
|
self.assertTrue("INFO:zulip.fix_unreads:elapsed time:" in info_logs.output[3])
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
|
|
|
info_logs.output[4],
|
2021-02-12 08:20:45 +01:00
|
|
|
"INFO:zulip.fix_unreads:finding unread messages for non-active streams",
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(info_logs.output[5], "INFO:zulip.fix_unreads:rows found: 0")
|
|
|
|
self.assertTrue("INFO:zulip.fix_unreads:elapsed time:" in info_logs.output[6])
|
2017-09-01 13:15:32 +02:00
|
|
|
|
2020-06-18 09:23:13 +02:00
|
|
|
assert_unread(um_normal_id)
|
2017-09-01 13:15:32 +02:00
|
|
|
assert_unread(um_muted_topic_id)
|
|
|
|
assert_unread(um_muted_stream_id)
|
|
|
|
assert_read(um_unsubscribed_id)
|
2018-08-02 01:29:06 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-08-02 01:29:06 +02:00
|
|
|
class PushNotificationMarkReadFlowsTest(ZulipTestCase):
    """End-to-end checks that marking messages as read (by topic, by
    stream, or via /json/mark_all_as_read) clears the corresponding
    active mobile push notifications."""

    def get_mobile_push_notification_ids(self, user_profile: UserProfile) -> List[int]:
        """Return message ids with an active mobile push notification for
        this user, in ascending message-id order."""
        return list(
            UserMessage.objects.filter(
                user_profile=user_profile,
            )
            .extra(
                where=[UserMessage.where_active_push_notification()],
            )
            .order_by("message_id")
            .values_list("message_id", flat=True)
        )

    @mock.patch("zerver.lib.push_notifications.push_notifications_enabled", return_value=True)
    def test_track_active_mobile_push_notifications(
        self, mock_push_notifications: mock.MagicMock
    ) -> None:
        mock_push_notifications.return_value = True
        self.login("hamlet")
        user_profile = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        stream = self.subscribe(user_profile, "test_stream")
        self.subscribe(cordelia, "test_stream")
        second_stream = self.subscribe(user_profile, "second_stream")
        self.subscribe(cordelia, "second_stream")

        # Enable stream-level push notifications on both streams.
        property_name = "push_notifications"
        result = self.api_post(
            user_profile,
            "/api/v1/users/me/subscriptions/properties",
            {
                "subscription_data": orjson.dumps(
                    [{"property": property_name, "value": True, "stream_id": stream.id}]
                ).decode()
            },
        )
        # Bug fix: the first request's result was previously overwritten by
        # the second request without ever being checked; verify both succeed.
        self.assert_json_success(result)
        result = self.api_post(
            user_profile,
            "/api/v1/users/me/subscriptions/properties",
            {
                "subscription_data": orjson.dumps(
                    [{"property": property_name, "value": True, "stream_id": second_stream.id}]
                ).decode()
            },
        )
        self.assert_json_success(result)
        self.assertEqual(self.get_mobile_push_notification_ids(user_profile), [])

        message_id = self.send_stream_message(cordelia, "test_stream", "hello", "test_topic")
        second_message_id = self.send_stream_message(
            cordelia, "test_stream", "hello", "other_topic"
        )
        third_message_id = self.send_stream_message(
            cordelia, "second_stream", "hello", "test_topic"
        )

        # All three incoming messages should have active push notifications.
        self.assertEqual(
            self.get_mobile_push_notification_ids(user_profile),
            [message_id, second_message_id, third_message_id],
        )

        # Marking a topic as read clears only that topic's notification.
        result = self.client_post(
            "/json/mark_topic_as_read",
            {
                "stream_id": str(stream.id),
                "topic_name": "test_topic",
            },
        )
        self.assert_json_success(result)
        self.assertEqual(
            self.get_mobile_push_notification_ids(user_profile),
            [second_message_id, third_message_id],
        )

        # Marking the whole stream as read clears the rest of its topics.
        result = self.client_post(
            "/json/mark_stream_as_read",
            {
                "stream_id": str(stream.id),
            },
        )
        self.assertEqual(self.get_mobile_push_notification_ids(user_profile), [third_message_id])

        fourth_message_id = self.send_stream_message(
            self.example_user("cordelia"), "test_stream", "hello", "test_topic"
        )
        self.assertEqual(
            self.get_mobile_push_notification_ids(user_profile),
            [third_message_id, fourth_message_id],
        )

        # mark_all_as_read clears every remaining notification.
        with timeout_mock("zerver.views.message_flags"):
            result = self.client_post("/json/mark_all_as_read", {})
        self.assertEqual(self.get_mobile_push_notification_ids(user_profile), [])
        mock_push_notifications.assert_called()
|
2020-06-29 13:19:17 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-10-02 21:32:36 +02:00
|
|
|
class MarkAllAsReadEndpointTest(ZulipTestCase):
    """Tests for the /json/mark_all_as_read endpoint, including its
    partially-completed response when the operation times out."""

    def _unread_count(self, user: UserProfile) -> int:
        # Count UserMessage rows still marked unread for `user`.
        return (
            UserMessage.objects.filter(user_profile=user)
            .extra(where=[UserMessage.where_unread()])
            .count()
        )

    def test_mark_all_as_read_endpoint(self) -> None:
        self.login("hamlet")
        hamlet = self.example_user("hamlet")
        othello = self.example_user("othello")
        self.subscribe(hamlet, "Denmark")

        # Generate a mix of unread stream and personal messages.
        for _ in range(4):
            self.send_stream_message(othello, "Verona", "test")
            self.send_personal_message(othello, hamlet, "test")

        unread_count = self._unread_count(hamlet)
        self.assertNotEqual(unread_count, 0)
        with timeout_mock("zerver.views.message_flags"):
            result = self.client_post("/json/mark_all_as_read", {})
        self.assert_json_success(result)

        self.assertEqual(self._unread_count(hamlet), 0)

    def test_mark_all_as_read_timeout_response(self) -> None:
        self.login("hamlet")
        # Simulate the view's timeout wrapper expiring; the endpoint
        # should report partial completion rather than an error.
        with mock.patch("zerver.views.message_flags.timeout", side_effect=TimeoutExpiredError):
            result = self.client_post("/json/mark_all_as_read", {})
            self.assertEqual(result.status_code, 200)

            result_dict = orjson.loads(result.content)
            self.assertEqual(
                result_dict, {"result": "partially_completed", "msg": "", "code": "REQUEST_TIMEOUT"}
            )
|
|
|
|
|
|
|
|
|
2020-06-29 13:19:17 +02:00
|
|
|
class GetUnreadMsgsTest(ZulipTestCase):
|
|
|
|
def mute_stream(self, user_profile: UserProfile, stream: Stream) -> None:
    """Mute `stream` for `user_profile` by setting is_muted directly on
    the Subscription row."""
    recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
    sub = Subscription.objects.get(
        user_profile=user_profile,
        recipient=recipient,
    )
    sub.is_muted = True
    sub.save()
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def mute_topic(self, user_profile: UserProfile, stream_name: str, topic_name: str) -> None:
    """Mute one topic in the named stream for `user_profile` via the
    normal action-layer code path."""
    stream = get_stream(stream_name, user_profile.realm)
    do_mute_topic(user_profile, stream, topic_name)
|
|
|
|
|
|
|
|
def test_raw_unread_stream(self) -> None:
    """Verify get_raw_unread_data's stream_dict and unmuted_stream_msgs
    fields, including the effects of muted streams and muted topics."""
    cordelia = self.example_user("cordelia")
    hamlet = self.example_user("hamlet")
    realm = hamlet.realm

    for stream_name in ["social", "devel", "test here"]:
        self.subscribe(hamlet, stream_name)
        self.subscribe(cordelia, stream_name)

    all_message_ids: Set[int] = set()
    message_ids = {}

    stream_topic_pairs = [
        ("social", "lunch"),
        ("test here", "bla"),
        ("devel", "python"),
        ("devel", "ruby"),
    ]

    # Send three messages to each (stream, topic) pair.
    for stream_name, topic_name in stream_topic_pairs:
        message_ids[topic_name] = [
            self.send_stream_message(
                sender=cordelia,
                stream_name=stream_name,
                topic_name=topic_name,
            )
            for _ in range(3)
        ]
        all_message_ids |= set(message_ids[topic_name])

    self.assert_length(all_message_ids, 12)  # sanity check on test setup

    self.mute_stream(
        user_profile=hamlet,
        stream=get_stream("test here", realm),
    )

    self.mute_topic(
        user_profile=hamlet,
        stream_name="devel",
        topic_name="ruby",
    )

    raw_unread_data = get_raw_unread_data(
        user_profile=hamlet,
    )

    stream_dict = raw_unread_data["stream_dict"]

    # All 12 messages are unread, muted or not.
    self.assertEqual(
        set(stream_dict.keys()),
        all_message_ids,
    )

    # Only the unmuted topics count toward unmuted_stream_msgs.
    self.assertEqual(
        raw_unread_data["unmuted_stream_msgs"],
        set(message_ids["python"]) | set(message_ids["lunch"]),
    )

    self.assertEqual(
        stream_dict[message_ids["lunch"][0]],
        {
            "stream_id": get_stream("social", realm).id,
            "topic": "lunch",
        },
    )
|
|
|
|
|
|
|
|
def test_raw_unread_huddle(self) -> None:
    """Verify get_raw_unread_data's huddle_dict for group private
    messages, including the sorted user_ids_string encoding."""
    cordelia = self.example_user("cordelia")
    othello = self.example_user("othello")
    hamlet = self.example_user("hamlet")
    prospero = self.example_user("prospero")

    huddle1_message_ids = [
        self.send_huddle_message(cordelia, [hamlet, othello]) for _ in range(3)
    ]

    huddle2_message_ids = [
        self.send_huddle_message(cordelia, [hamlet, prospero]) for _ in range(3)
    ]

    raw_unread_data = get_raw_unread_data(
        user_profile=hamlet,
    )

    huddle_dict = raw_unread_data["huddle_dict"]

    # Every message from both huddles shows up as unread.
    self.assertEqual(
        set(huddle_dict.keys()),
        set(huddle1_message_ids) | set(huddle2_message_ids),
    )

    # Huddles are keyed by a comma-joined, sorted list of user ids.
    huddle_string = ",".join(str(uid) for uid in sorted([cordelia.id, hamlet.id, othello.id]))

    self.assertEqual(
        huddle_dict[huddle1_message_ids[0]],
        dict(user_ids_string=huddle_string),
    )
|
|
|
|
|
|
|
|
def test_raw_unread_personal(self) -> None:
    """Verify get_raw_unread_data's pm_dict for one-on-one private
    messages from two different senders."""
    cordelia = self.example_user("cordelia")
    othello = self.example_user("othello")
    hamlet = self.example_user("hamlet")

    cordelia_pm_message_ids = [self.send_personal_message(cordelia, hamlet) for _ in range(3)]

    othello_pm_message_ids = [self.send_personal_message(othello, hamlet) for _ in range(3)]

    raw_unread_data = get_raw_unread_data(
        user_profile=hamlet,
    )

    pm_dict = raw_unread_data["pm_dict"]

    # All six PMs show up as unread.
    self.assertEqual(
        set(pm_dict.keys()),
        set(cordelia_pm_message_ids) | set(othello_pm_message_ids),
    )

    # Each entry records who the conversation partner is.
    self.assertEqual(
        pm_dict[cordelia_pm_message_ids[0]],
        dict(other_user_id=cordelia.id),
    )
|
|
|
|
|
|
|
|
def test_raw_unread_personal_from_self(self) -> None:
    """Messages that Hamlet sends via an API client (not a human-facing
    client) stay unread for Hamlet himself; verify both the initial
    get_raw_unread_data snapshot and incremental updates via
    apply_unread_message_event agree on that."""
    hamlet = self.example_user("hamlet")

    def send_unread_pm(other_user: UserProfile) -> Message:
        # It is rare to send a message from Hamlet to Othello
        # (or any other user) and have it be unread for
        # Hamlet himself, but that is actually normal
        # behavior for most API clients.
        message_id = self.send_personal_message(
            from_user=hamlet,
            to_user=other_user,
            sending_client_name="some_api_program",
        )

        # Check our test setup is correct--the message should
        # not have looked like it was sent by a human.
        message = Message.objects.get(id=message_id)
        self.assertFalse(message.sent_by_human())

        # And since it was not sent by a human, it should not
        # be read, not even by the sender (Hamlet).
        um = UserMessage.objects.get(
            user_profile_id=hamlet.id,
            message_id=message_id,
        )
        self.assertFalse(um.flags.read)

        return message

    othello = self.example_user("othello")
    othello_msg = send_unread_pm(other_user=othello)

    # And now check the unread data structure...
    raw_unread_data = get_raw_unread_data(
        user_profile=hamlet,
    )

    pm_dict = raw_unread_data["pm_dict"]

    self.assertEqual(set(pm_dict.keys()), {othello_msg.id})

    self.assertEqual(
        pm_dict[othello_msg.id],
        dict(other_user_id=othello.id),
    )

    cordelia = self.example_user("cordelia")
    cordelia_msg = send_unread_pm(other_user=cordelia)

    # Apply the new-message event incrementally to the existing state.
    apply_unread_message_event(
        user_profile=hamlet,
        state=raw_unread_data,
        message=MessageDict.wide_dict(cordelia_msg),
        flags=[],
    )
    self.assertEqual(
        set(pm_dict.keys()),
        {othello_msg.id, cordelia_msg.id},
    )

    self.assertEqual(
        pm_dict[cordelia_msg.id],
        dict(other_user_id=cordelia.id),
    )

    # Send a message to ourself.
    hamlet_msg = send_unread_pm(other_user=hamlet)
    apply_unread_message_event(
        user_profile=hamlet,
        state=raw_unread_data,
        message=MessageDict.wide_dict(hamlet_msg),
        flags=[],
    )
    self.assertEqual(
        set(pm_dict.keys()),
        {othello_msg.id, cordelia_msg.id, hamlet_msg.id},
    )

    self.assertEqual(
        pm_dict[hamlet_msg.id],
        dict(other_user_id=hamlet.id),
    )

    # Call get_raw_unread_data again; a fresh snapshot should match the
    # incrementally-updated state.
    raw_unread_data = get_raw_unread_data(
        user_profile=hamlet,
    )
    pm_dict = raw_unread_data["pm_dict"]

    self.assertEqual(
        set(pm_dict.keys()),
        {othello_msg.id, cordelia_msg.id, hamlet_msg.id},
    )

    self.assertEqual(
        pm_dict[hamlet_msg.id],
        dict(other_user_id=hamlet.id),
    )
|
|
|
|
|
|
|
|
    def test_unread_msgs(self) -> None:
        """End-to-end check of get_raw_unread_data/aggregate_unread_data:
        sets up PMs, stream, muted-stream, muted-topic, and huddle unreads
        for hamlet, then verifies the aggregated counts, per-bucket message
        ids, and how mention-related flags interact with muting.

        NOTE: the assertions are order-dependent; each get_unread_data()
        call reflects all prior flag/save mutations.
        """
        sender = self.example_user("cordelia")
        sender_id = sender.id
        user_profile = self.example_user("hamlet")
        othello = self.example_user("othello")

        # Subscribe the sender so stream sends below produce a UserMessage
        # row for the sender (required by test-suite invariants).
        self.subscribe(sender, "Denmark")

        pm1_message_id = self.send_personal_message(sender, user_profile, "hello1")
        pm2_message_id = self.send_personal_message(sender, user_profile, "hello2")

        muted_stream = self.subscribe(user_profile, "Muted stream")
        self.subscribe(sender, muted_stream.name)
        self.mute_stream(user_profile, muted_stream)
        self.mute_topic(user_profile, "Denmark", "muted-topic")

        # One unread in each bucket: normal stream, muted stream, muted topic.
        stream_message_id = self.send_stream_message(sender, "Denmark", "hello")
        muted_stream_message_id = self.send_stream_message(sender, "Muted stream", "hello")
        muted_topic_message_id = self.send_stream_message(
            sender,
            "Denmark",
            topic_name="muted-topic",
            content="hello",
        )

        huddle_message_id = self.send_huddle_message(
            sender,
            [user_profile, othello],
            "hello3",
        )

        def get_unread_data() -> UnreadMessagesResult:
            # Re-fetch and aggregate hamlet's unread state from the DB.
            raw_unread_data = get_raw_unread_data(user_profile)
            aggregated_data = aggregate_unread_data(raw_unread_data)
            return aggregated_data

        # With a tiny MAX_UNREAD_MESSAGES, older unreads get dropped and
        # old_unreads_missing is reported.
        with mock.patch("zerver.lib.message.MAX_UNREAD_MESSAGES", 4):
            result = get_unread_data()
            self.assertEqual(result["count"], 2)
            self.assertTrue(result["old_unreads_missing"])

        result = get_unread_data()

        # The count here reflects the count of unread messages that we will
        # report to users in the bankruptcy dialog, and for now it excludes
        # unread messages from muted streams, but it doesn't exclude unread
        # messages from muted topics yet.
        self.assertEqual(result["count"], 4)
        self.assertFalse(result["old_unreads_missing"])

        unread_pm = result["pms"][0]
        self.assertEqual(unread_pm["sender_id"], sender_id)
        self.assertEqual(unread_pm["unread_message_ids"], [pm1_message_id, pm2_message_id])

        unread_stream = result["streams"][0]
        self.assertEqual(unread_stream["stream_id"], get_stream("Denmark", user_profile.realm).id)
        self.assertEqual(unread_stream["topic"], "muted-topic")
        self.assertEqual(unread_stream["unread_message_ids"], [muted_topic_message_id])

        unread_stream = result["streams"][1]
        self.assertEqual(unread_stream["stream_id"], get_stream("Denmark", user_profile.realm).id)
        self.assertEqual(unread_stream["topic"], "test")
        self.assertEqual(unread_stream["unread_message_ids"], [stream_message_id])

        unread_stream = result["streams"][2]
        self.assertEqual(
            unread_stream["stream_id"], get_stream("Muted stream", user_profile.realm).id
        )
        self.assertEqual(unread_stream["topic"], "test")
        self.assertEqual(unread_stream["unread_message_ids"], [muted_stream_message_id])

        # Huddle bucket is keyed by the sorted, comma-joined user id string.
        huddle_string = ",".join(
            str(uid) for uid in sorted([sender_id, user_profile.id, othello.id])
        )

        unread_huddle = result["huddles"][0]
        self.assertEqual(unread_huddle["user_ids_string"], huddle_string)
        self.assertEqual(unread_huddle["unread_message_ids"], [huddle_message_id])

        self.assertEqual(result["mentions"], [])

        # A direct mention on an unread message in a normal stream shows up
        # in result["mentions"].
        um = UserMessage.objects.get(
            user_profile_id=user_profile.id,
            message_id=stream_message_id,
        )
        um.flags |= UserMessage.flags.mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result["mentions"], [stream_message_id])

        um.flags = UserMessage.flags.has_alert_word
        um.save()
        result = get_unread_data()
        # TODO: This should change when we make alert words work better.
        self.assertEqual(result["mentions"], [])

        um.flags = UserMessage.flags.wildcard_mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result["mentions"], [stream_message_id])

        um.flags = 0
        um.save()
        result = get_unread_data()
        self.assertEqual(result["mentions"], [])

        # Test with a muted stream
        um = UserMessage.objects.get(
            user_profile_id=user_profile.id,
            message_id=muted_stream_message_id,
        )
        # Direct mentions count even in muted streams...
        um.flags = UserMessage.flags.mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result["mentions"], [muted_stream_message_id])

        um.flags = UserMessage.flags.has_alert_word
        um.save()
        result = get_unread_data()
        self.assertEqual(result["mentions"], [])

        # ...but wildcard mentions in muted streams do not.
        um.flags = UserMessage.flags.wildcard_mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result["mentions"], [])

        um.flags = 0
        um.save()
        result = get_unread_data()
        self.assertEqual(result["mentions"], [])

        # Test with a muted topic
        um = UserMessage.objects.get(
            user_profile_id=user_profile.id,
            message_id=muted_topic_message_id,
        )
        # Same pattern as muted streams: direct mentions count, wildcard
        # mentions in muted topics do not.
        um.flags = UserMessage.flags.mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result["mentions"], [muted_topic_message_id])

        um.flags = UserMessage.flags.has_alert_word
        um.save()
        result = get_unread_data()
        self.assertEqual(result["mentions"], [])

        um.flags = UserMessage.flags.wildcard_mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result["mentions"], [])

        um.flags = 0
        um.save()
        result = get_unread_data()
        self.assertEqual(result["mentions"], [])
|
2020-07-08 00:35:59 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-07-08 00:35:59 +02:00
|
|
|
class MessageAccessTests(ZulipTestCase):
    """Tests for message-access security around the update-flags endpoint
    (POST /json/messages/flags) and the bulk_access_messages helper,
    covering private/public streams, protected history, guests, and
    cross-realm access.
    """

    def test_update_invalid_flags(self) -> None:
        """Invalid, non-existent, read-only, and wrongly-operated flags are
        all rejected with descriptive errors."""
        message = self.send_personal_message(
            self.example_user("cordelia"),
            self.example_user("hamlet"),
            "hello",
        )

        self.login("hamlet")
        # A flag name the server doesn't know at all.
        result = self.client_post(
            "/json/messages/flags",
            {"messages": orjson.dumps([message]).decode(), "op": "add", "flag": "invalid"},
        )
        self.assert_json_error(result, "Invalid flag: 'invalid'")

        # 'is_private' is internal-only and not exposed via the API.
        result = self.client_post(
            "/json/messages/flags",
            {"messages": orjson.dumps([message]).decode(), "op": "add", "flag": "is_private"},
        )
        self.assert_json_error(result, "Invalid flag: 'is_private'")

        result = self.client_post(
            "/json/messages/flags",
            {
                "messages": orjson.dumps([message]).decode(),
                "op": "add",
                "flag": "active_mobile_push_notification",
            },
        )
        self.assert_json_error(result, "Invalid flag: 'active_mobile_push_notification'")

        # 'mentioned' exists but is server-managed, not client-editable.
        result = self.client_post(
            "/json/messages/flags",
            {"messages": orjson.dumps([message]).decode(), "op": "add", "flag": "mentioned"},
        )
        self.assert_json_error(result, "Flag not editable: 'mentioned'")

        # Only 'add'/'remove' are valid operations.
        result = self.client_post(
            "/json/messages/flags",
            {"messages": orjson.dumps([message]).decode(), "op": "bogus", "flag": "starred"},
        )
        self.assert_json_error(result, "Invalid message flag operation: 'bogus'")

    def change_star(
        self, messages: List[int], add: bool = True, **kwargs: Any
    ) -> "TestHttpResponse":
        """Helper: add (or remove, if add=False) the 'starred' flag on the
        given message ids via the flags endpoint; kwargs (e.g. subdomain)
        are forwarded to client_post."""
        return self.client_post(
            "/json/messages/flags",
            {
                "messages": orjson.dumps(messages).decode(),
                "op": "add" if add else "remove",
                "flag": "starred",
            },
            **kwargs,
        )

    def test_change_star(self) -> None:
        """
        You can set a message as starred/un-starred through
        POST /json/messages/flags.
        """
        self.login("hamlet")
        message_ids = [
            self.send_personal_message(
                self.example_user("hamlet"), self.example_user("hamlet"), "test"
            )
        ]

        # Star a message.
        result = self.change_star(message_ids)
        self.assert_json_success(result)

        for msg in self.get_messages():
            if msg["id"] in message_ids:
                check_flags(msg["flags"], {"starred"})
            else:
                check_flags(msg["flags"], {"read"})

        # Remove the stars.
        result = self.change_star(message_ids, False)
        self.assert_json_success(result)

        for msg in self.get_messages():
            if msg["id"] in message_ids:
                check_flags(msg["flags"], set())

    def test_change_collapsed_public_stream_historical(self) -> None:
        """Collapsing a public-stream message you never received creates a
        'historical' UserMessage row with read+collapsed flags."""
        hamlet = self.example_user("hamlet")
        stream_name = "new_stream"
        self.subscribe(hamlet, stream_name)
        self.login_user(hamlet)
        message_id = self.send_stream_message(hamlet, stream_name, "test")

        # Now login as another user who wasn't on that stream
        cordelia = self.example_user("cordelia")
        self.login_user(cordelia)

        result = self.client_post(
            "/json/messages/flags",
            dict(messages=orjson.dumps([message_id]).decode(), op="add", flag="collapsed"),
        )
        self.assert_json_success(result)

        um = UserMessage.objects.get(user_profile_id=cordelia.id, message_id=message_id)
        self.assertEqual(um.flags_list(), ["read", "collapsed", "historical"])

    def test_change_star_public_stream_historical(self) -> None:
        """
        You can set a message as starred/un-starred through
        POST /json/messages/flags.
        """
        stream_name = "new_stream"
        self.subscribe(self.example_user("hamlet"), stream_name)
        self.login("hamlet")
        message_ids = [
            self.send_stream_message(self.example_user("hamlet"), stream_name, "test"),
        ]
        # Send a second message so we can verify it isn't modified
        other_message_ids = [
            self.send_stream_message(self.example_user("hamlet"), stream_name, "test_unused"),
        ]
        received_message_ids = [
            self.send_personal_message(
                self.example_user("hamlet"),
                self.example_user("cordelia"),
                "test_received",
            ),
        ]

        # Now login as another user who wasn't on that stream
        self.login("cordelia")
        # Send a message to yourself to make sure we have at least one with
        # the read flag
        sent_message_ids = [
            self.send_personal_message(
                self.example_user("cordelia"),
                self.example_user("cordelia"),
                "test_read_message",
            ),
        ]
        result = self.client_post(
            "/json/messages/flags",
            {"messages": orjson.dumps(sent_message_ids).decode(), "op": "add", "flag": "read"},
        )

        # Confirm that one can change the historical flag now
        result = self.change_star(message_ids)
        self.assert_json_success(result)

        for msg in self.get_messages():
            if msg["id"] in message_ids:
                check_flags(msg["flags"], {"starred", "historical", "read"})
            elif msg["id"] in received_message_ids:
                check_flags(msg["flags"], set())
            else:
                check_flags(msg["flags"], {"read"})
                self.assertNotIn(msg["id"], other_message_ids)

        result = self.change_star(message_ids, False)
        self.assert_json_success(result)

        # But it still doesn't work if you're in another realm
        user = self.mit_user("sipbtest")
        self.login_user(user)
        result = self.change_star(message_ids, subdomain="zephyr")
        self.assert_json_error(result, "Invalid message(s)")

    def test_change_star_private_message_security(self) -> None:
        """
        You can set a message as starred/un-starred through
        POST /json/messages/flags.
        """
        self.login("hamlet")
        message_ids = [
            self.send_personal_message(
                self.example_user("hamlet"),
                self.example_user("hamlet"),
                "test",
            ),
        ]

        # Starring private messages you didn't receive fails.
        self.login("cordelia")
        result = self.change_star(message_ids)
        self.assert_json_error(result, "Invalid message(s)")

    def test_change_star_private_stream_security(self) -> None:
        """Access to star private-stream messages depends on subscription
        and the stream's history_public_to_subscribers policy."""
        stream_name = "private_stream"
        self.make_stream(stream_name, invite_only=True)
        self.subscribe(self.example_user("hamlet"), stream_name)
        self.login("hamlet")
        message_ids = [
            self.send_stream_message(self.example_user("hamlet"), stream_name, "test"),
        ]

        # Starring private stream messages you received works
        result = self.change_star(message_ids)
        self.assert_json_success(result)

        # Starring private stream messages you didn't receive fails.
        self.login("cordelia")
        # transaction.atomic keeps the expected failure from breaking the
        # test transaction.
        with transaction.atomic():
            result = self.change_star(message_ids)
        self.assert_json_error(result, "Invalid message(s)")

        stream_name = "private_stream_2"
        self.make_stream(stream_name, invite_only=True, history_public_to_subscribers=True)
        self.subscribe(self.example_user("hamlet"), stream_name)
        self.login("hamlet")
        message_ids = [
            self.send_stream_message(self.example_user("hamlet"), stream_name, "test"),
        ]

        # With stream.history_public_to_subscribers = True, you still
        # can't see it if you didn't receive the message and are
        # not subscribed.
        self.login("cordelia")
        with transaction.atomic():
            result = self.change_star(message_ids)
        self.assert_json_error(result, "Invalid message(s)")

        # But if you subscribe, then you can star the message
        self.subscribe(self.example_user("cordelia"), stream_name)
        result = self.change_star(message_ids)
        self.assert_json_success(result)

    def test_new_message(self) -> None:
        """
        New messages aren't starred.
        """
        sender = self.example_user("hamlet")
        self.login_user(sender)
        content = "Test message for star"
        self.send_stream_message(sender, "Verona", content=content)

        # Fetch the most recent UserMessage row for hamlet.
        sent_message = (
            UserMessage.objects.filter(
                user_profile=self.example_user("hamlet"),
            )
            .order_by("id")
            .reverse()[0]
        )
        self.assertEqual(sent_message.message.content, content)
        self.assertFalse(sent_message.flags.starred)

    def test_change_star_public_stream_security_for_guest_user(self) -> None:
        # Guest user can't access(star) unsubscribed public stream messages
        normal_user = self.example_user("hamlet")
        stream_name = "public_stream"
        self.make_stream(stream_name)
        self.subscribe(normal_user, stream_name)
        self.login_user(normal_user)

        message_id = [
            self.send_stream_message(normal_user, stream_name, "test 1"),
        ]

        guest_user = self.example_user("polonius")
        self.login_user(guest_user)
        with transaction.atomic():
            result = self.change_star(message_id)
        self.assert_json_error(result, "Invalid message(s)")

        # Subscribed guest users can access public stream messages sent
        # before they join
        self.subscribe(guest_user, stream_name)
        result = self.change_star(message_id)
        self.assert_json_success(result)

        # And messages sent after they join
        self.login_user(normal_user)
        message_id = [
            self.send_stream_message(normal_user, stream_name, "test 2"),
        ]
        self.login_user(guest_user)
        result = self.change_star(message_id)
        self.assert_json_success(result)

    def test_change_star_private_stream_security_for_guest_user(self) -> None:
        # Guest users can't access(star) unsubscribed private stream messages
        normal_user = self.example_user("hamlet")
        stream_name = "private_stream"
        stream = self.make_stream(stream_name, invite_only=True)
        self.subscribe(normal_user, stream_name)
        self.login_user(normal_user)

        message_id = [
            self.send_stream_message(normal_user, stream_name, "test 1"),
        ]

        guest_user = self.example_user("polonius")
        self.login_user(guest_user)
        with transaction.atomic():
            result = self.change_star(message_id)
        self.assert_json_error(result, "Invalid message(s)")

        # Guest user can't access messages of subscribed private streams if
        # history is not public to subscribers
        self.subscribe(guest_user, stream_name)
        with transaction.atomic():
            result = self.change_star(message_id)
        self.assert_json_error(result, "Invalid message(s)")

        # Guest user can access messages of subscribed private streams if
        # history is public to subscribers
        do_change_stream_permission(
            stream,
            invite_only=True,
            history_public_to_subscribers=True,
            is_web_public=False,
            acting_user=guest_user,
        )
        result = self.change_star(message_id)
        self.assert_json_success(result)

        # With history not public to subscribers, they can still see new
        # messages
        do_change_stream_permission(
            stream,
            invite_only=True,
            history_public_to_subscribers=False,
            is_web_public=False,
            acting_user=guest_user,
        )
        self.login_user(normal_user)
        message_id = [
            self.send_stream_message(normal_user, stream_name, "test 2"),
        ]
        self.login_user(guest_user)
        result = self.change_star(message_id)
        self.assert_json_success(result)

    def test_bulk_access_messages_private_stream(self) -> None:
        """bulk_access_messages filters private-stream messages by the
        subscriber's access, honoring protected history, and runs in a
        constant number of queries."""
        user = self.example_user("hamlet")
        self.login_user(user)

        stream_name = "private_stream"
        stream = self.make_stream(
            stream_name, invite_only=True, history_public_to_subscribers=False
        )

        self.subscribe(user, stream_name)
        # Send a message before subscribing a new user to stream
        message_one_id = self.send_stream_message(user, stream_name, "Message one")

        later_subscribed_user = self.example_user("cordelia")
        # Subscribe a user to private-protected history stream
        self.subscribe(later_subscribed_user, stream_name)

        # Send a message after subscribing a new user to stream
        message_two_id = self.send_stream_message(user, stream_name, "Message two")

        message_ids = [message_one_id, message_two_id]
        messages = [
            Message.objects.select_related().get(id=message_id) for message_id in message_ids
        ]

        with self.assert_database_query_count(2):
            filtered_messages = bulk_access_messages(later_subscribed_user, messages, stream=stream)

        # Message sent before subscribing wouldn't be accessible by later
        # subscribed user as stream has protected history
        self.assert_length(filtered_messages, 1)
        self.assertEqual(filtered_messages[0].id, message_two_id)

        do_change_stream_permission(
            stream,
            invite_only=True,
            history_public_to_subscribers=True,
            is_web_public=False,
            acting_user=self.example_user("cordelia"),
        )

        with self.assert_database_query_count(2):
            filtered_messages = bulk_access_messages(later_subscribed_user, messages, stream=stream)

        # Messages sent before subscribing are accessible by the user, as
        # the stream no longer has protected history
        self.assert_length(filtered_messages, 2)

        # Testing messages accessibility for an unsubscribed user
        unsubscribed_user = self.example_user("ZOE")

        with self.assert_database_query_count(2):
            filtered_messages = bulk_access_messages(unsubscribed_user, messages, stream=stream)

        self.assert_length(filtered_messages, 0)

        # Verify an exception is thrown if called where the passed
        # stream not matching the messages.
        with self.assertRaises(AssertionError):
            bulk_access_messages(
                unsubscribed_user, messages, stream=get_stream("Denmark", unsubscribed_user.realm)
            )

    def test_bulk_access_messages_public_stream(self) -> None:
        """Public-stream messages are accessible to everyone in the realm,
        subscribed or not."""
        user = self.example_user("hamlet")
        self.login_user(user)

        # Testing messages accessibility including a public stream message
        stream_name = "public_stream"
        stream = self.subscribe(user, stream_name)
        message_one_id = self.send_stream_message(user, stream_name, "Message one")

        later_subscribed_user = self.example_user("cordelia")
        self.subscribe(later_subscribed_user, stream_name)

        # Send a message after subscribing a new user to stream
        message_two_id = self.send_stream_message(user, stream_name, "Message two")

        message_ids = [message_one_id, message_two_id]
        messages = [
            Message.objects.select_related().get(id=message_id) for message_id in message_ids
        ]

        # All public stream messages are always accessible
        with self.assert_database_query_count(2):
            filtered_messages = bulk_access_messages(later_subscribed_user, messages, stream=stream)
        self.assert_length(filtered_messages, 2)

        unsubscribed_user = self.example_user("ZOE")
        with self.assert_database_query_count(2):
            filtered_messages = bulk_access_messages(unsubscribed_user, messages, stream=stream)

        self.assert_length(filtered_messages, 2)
|
2020-07-08 02:29:18 +02:00
|
|
|
|
|
|
|
|
|
|
|
class PersonalMessagesFlagTest(ZulipTestCase):
    """Verify internal-only UserMessage flags stay out of API payloads."""

    def test_is_private_flag_not_leaked(self) -> None:
        """
        Make sure `is_private` flag is not leaked to the API.
        """
        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        self.login("hamlet")
        self.send_personal_message(hamlet, cordelia, "test")

        # Every message the client sees must omit the internal flag.
        for fetched_message in self.get_messages():
            self.assertNotIn("is_private", fetched_message["flags"])
|
2021-06-09 13:31:39 +02:00
|
|
|
|
|
|
|
|
|
|
|
class MarkUnreadTest(ZulipTestCase):
|
2022-05-30 17:46:05 +02:00
|
|
|
def mute_stream(self, stream_name: str, user: UserProfile) -> None:
|
2021-06-09 13:31:39 +02:00
|
|
|
realm = get_realm("zulip")
|
|
|
|
stream = get_stream(stream_name, realm)
|
|
|
|
recipient = stream.recipient
|
|
|
|
subscription = Subscription.objects.get(
|
|
|
|
user_profile=user,
|
|
|
|
recipient=recipient,
|
|
|
|
)
|
|
|
|
subscription.is_muted = True
|
|
|
|
subscription.save()
|
|
|
|
|
|
|
|
    def test_missing_usermessage_record(self) -> None:
        """Exercise get_raw_unread_data() for messages that predate the
        user's subscription and therefore have no UserMessage row for them.
        """
        cordelia = self.example_user("cordelia")
        othello = self.example_user("othello")

        stream_name = "Some new stream"
        self.subscribe(cordelia, stream_name)

        # Sent before Othello subscribes, so Othello gets no UserMessage
        # row for this message.
        message_id1 = self.send_stream_message(
            sender=cordelia,
            stream_name=stream_name,
            topic_name="lunch",
            content="whatever",
        )

        self.subscribe(othello, stream_name)

        raw_unread_data = get_raw_unread_data(
            user_profile=othello,
        )

        # The pre-subscription message does not count toward Othello's unreads.
        self.assertEqual(raw_unread_data["stream_dict"], {})

        message_id2 = self.send_stream_message(
            sender=cordelia,
            stream_name=stream_name,
            topic_name="lunch",
            content="whatever",
        )

        raw_unread_data = get_raw_unread_data(
            user_profile=othello,
        )

        # Only the message sent after subscribing shows up as unread.
        self.assertEqual(raw_unread_data["stream_dict"].keys(), {message_id2})

        # Explicitly marking the older message unread makes it appear in
        # the unread data as well.
        do_update_message_flags(othello, "remove", "read", [message_id1])

        raw_unread_data = get_raw_unread_data(
            user_profile=othello,
        )

        self.assertEqual(raw_unread_data["stream_dict"].keys(), {message_id1, message_id2})
|
|
|
|
|
|
|
|
def test_format_unread_message_details(self) -> None:
|
|
|
|
user = self.example_user("cordelia")
|
|
|
|
message_id = 999
|
|
|
|
|
|
|
|
# send message to self
|
|
|
|
pm_dict = {
|
|
|
|
message_id: RawUnreadPrivateMessageDict(other_user_id=user.id),
|
|
|
|
}
|
|
|
|
|
|
|
|
raw_unread_data = RawUnreadMessagesResult(
|
|
|
|
pm_dict=pm_dict,
|
|
|
|
stream_dict={},
|
|
|
|
huddle_dict={},
|
|
|
|
mentions=set(),
|
|
|
|
muted_stream_ids=[],
|
|
|
|
unmuted_stream_msgs=set(),
|
|
|
|
old_unreads_missing=False,
|
|
|
|
)
|
|
|
|
|
|
|
|
message_details = format_unread_message_details(user.id, raw_unread_data)
|
|
|
|
self.assertEqual(
|
|
|
|
message_details,
|
|
|
|
{
|
|
|
|
str(message_id): dict(type="private", user_ids=[]),
|
|
|
|
},
|
|
|
|
)
|
|
|
|
|
|
|
|
def test_add_message_to_unread_msgs(self) -> None:
|
|
|
|
user = self.example_user("cordelia")
|
|
|
|
message_id = 999
|
|
|
|
|
|
|
|
raw_unread_data = RawUnreadMessagesResult(
|
|
|
|
pm_dict={},
|
|
|
|
stream_dict={},
|
|
|
|
huddle_dict={},
|
|
|
|
mentions=set(),
|
|
|
|
muted_stream_ids=[],
|
|
|
|
unmuted_stream_msgs=set(),
|
|
|
|
old_unreads_missing=False,
|
|
|
|
)
|
|
|
|
|
|
|
|
# message to self
|
|
|
|
message_details = MessageDetailsDict(type="private", user_ids=[])
|
|
|
|
add_message_to_unread_msgs(user.id, raw_unread_data, message_id, message_details)
|
|
|
|
self.assertEqual(
|
|
|
|
raw_unread_data["pm_dict"],
|
|
|
|
{message_id: RawUnreadPrivateMessageDict(other_user_id=user.id)},
|
|
|
|
)
|
|
|
|
|
|
|
|
    def test_stream_messages_unread(self) -> None:
        """Marking stream messages unread flips only the requested messages
        and emits an update_message_flags event with per-message details.
        """
        sender = self.example_user("cordelia")
        receiver = self.example_user("hamlet")
        stream_name = "Denmark"
        stream = self.subscribe(receiver, stream_name)
        self.subscribe(sender, stream_name)
        topic_name = "test"
        message_ids = [
            self.send_stream_message(
                sender=sender,
                stream_name=stream_name,
                topic_name=topic_name,
            )
            for i in range(4)
        ]
        self.login("hamlet")
        # First mark everything read, so we have a known baseline.
        result = self.client_post(
            "/json/messages/flags",
            {"messages": orjson.dumps(message_ids).decode(), "op": "add", "flag": "read"},
        )
        self.assert_json_success(result)
        for message_id in message_ids:
            um = UserMessage.objects.get(
                user_profile_id=receiver.id,
                message_id=message_id,
            )
            self.assertTrue(um.flags.read)
        messages_to_unread = message_ids[2:]
        messages_still_read = message_ids[:2]

        params = {
            "messages": orjson.dumps(messages_to_unread).decode(),
            "op": "remove",
            "flag": "read",
        }

        events: List[Mapping[str, Any]] = []

        # Use the tornado_redirected_to_list context manager to capture
        # events.
        with self.tornado_redirected_to_list(events, expected_num_events=1):
            result = self.api_post(receiver, "/api/v1/messages/flags", params)

        self.assert_json_success(result)
        event = events[0]["event"]
        self.assertEqual(event["messages"], messages_to_unread)
        # message_details is keyed by stringified message id.
        unread_message_ids = {str(message_id) for message_id in messages_to_unread}
        self.assertSetEqual(set(event["message_details"].keys()), unread_message_ids)
        for message_id in event["message_details"]:
            self.assertEqual(
                event["message_details"][message_id],
                dict(
                    type="stream",
                    topic="test",
                    unmuted_stream_msg=True,
                    stream_id=stream.id,
                ),
            )

        # Database flags must match: only the requested half is unread.
        for message_id in messages_to_unread:
            um = UserMessage.objects.get(
                user_profile_id=receiver.id,
                message_id=message_id,
            )
            self.assertFalse(um.flags.read)
        for message_id in messages_still_read:
            um = UserMessage.objects.get(
                user_profile_id=receiver.id,
                message_id=message_id,
            )
            self.assertTrue(um.flags.read)
|
|
|
|
|
|
|
|
    def test_stream_messages_unread_muted(self) -> None:
        """Same flow as test_stream_messages_unread, but with the stream
        muted: the event's message_details must carry unmuted_stream_msg=False.
        """
        sender = self.example_user("cordelia")
        receiver = self.example_user("hamlet")
        stream_name = "Denmark"
        stream = self.subscribe(receiver, stream_name)
        self.subscribe(sender, stream_name)
        topic_name = "test"
        message_ids = [
            self.send_stream_message(
                sender=sender,
                stream_name=stream_name,
                topic_name=topic_name,
            )
            for i in range(4)
        ]
        # Mute after sending, before marking read/unread.
        self.mute_stream(stream_name, receiver)
        self.login("hamlet")
        result = self.client_post(
            "/json/messages/flags",
            {"messages": orjson.dumps(message_ids).decode(), "op": "add", "flag": "read"},
        )
        self.assert_json_success(result)
        for message_id in message_ids:
            um = UserMessage.objects.get(
                user_profile_id=receiver.id,
                message_id=message_id,
            )
            self.assertTrue(um.flags.read)
        messages_to_unread = message_ids[2:]
        messages_still_read = message_ids[:2]

        params = {
            "messages": orjson.dumps(messages_to_unread).decode(),
            "op": "remove",
            "flag": "read",
        }

        events: List[Mapping[str, Any]] = []

        # Use the tornado_redirected_to_list context manager to capture
        # events.
        with self.tornado_redirected_to_list(events, expected_num_events=1):
            result = self.api_post(receiver, "/api/v1/messages/flags", params)

        self.assert_json_success(result)
        event = events[0]["event"]
        self.assertEqual(event["messages"], messages_to_unread)
        unread_message_ids = {str(message_id) for message_id in messages_to_unread}
        self.assertSetEqual(set(event["message_details"].keys()), unread_message_ids)
        for message_id in event["message_details"]:
            self.assertEqual(
                event["message_details"][message_id],
                dict(
                    type="stream",
                    topic="test",
                    # The stream is muted, so these are not unmuted messages.
                    unmuted_stream_msg=False,
                    stream_id=stream.id,
                ),
            )

        for message_id in messages_to_unread:
            um = UserMessage.objects.get(
                user_profile_id=receiver.id,
                message_id=message_id,
            )
            self.assertFalse(um.flags.read)
        for message_id in messages_still_read:
            um = UserMessage.objects.get(
                user_profile_id=receiver.id,
                message_id=message_id,
            )
            self.assertTrue(um.flags.read)
|
|
|
|
|
|
|
|
    def test_stream_messages_unread_mention(self) -> None:
        """Same flow as test_stream_messages_unread, but the messages mention
        the receiver: each message_details entry must carry mentioned=True.
        """
        sender = self.example_user("cordelia")
        receiver = self.example_user("hamlet")
        stream_name = "Denmark"
        stream = self.subscribe(receiver, stream_name)
        self.subscribe(sender, stream_name)
        topic_name = "test"
        message_ids = [
            self.send_stream_message(
                sender=sender,
                stream_name=stream_name,
                topic_name=topic_name,
                # Silent-free mention syntax for the receiver (King Hamlet).
                content="@**King Hamlet**",
            )
            for i in range(4)
        ]
        self.login("hamlet")
        result = self.client_post(
            "/json/messages/flags",
            {"messages": orjson.dumps(message_ids).decode(), "op": "add", "flag": "read"},
        )
        self.assert_json_success(result)
        for message_id in message_ids:
            um = UserMessage.objects.get(
                user_profile_id=receiver.id,
                message_id=message_id,
            )
            self.assertTrue(um.flags.read)
        messages_to_unread = message_ids[2:]
        messages_still_read = message_ids[:2]

        params = {
            "messages": orjson.dumps(messages_to_unread).decode(),
            "op": "remove",
            "flag": "read",
        }

        events: List[Mapping[str, Any]] = []

        # Use the tornado_redirected_to_list context manager to capture
        # events.
        with self.tornado_redirected_to_list(events, expected_num_events=1):
            result = self.api_post(receiver, "/api/v1/messages/flags", params)

        self.assert_json_success(result)
        event = events[0]["event"]
        self.assertEqual(event["messages"], messages_to_unread)
        unread_message_ids = {str(message_id) for message_id in messages_to_unread}
        self.assertSetEqual(set(event["message_details"].keys()), unread_message_ids)
        for message_id in event["message_details"]:
            self.assertEqual(
                event["message_details"][message_id],
                dict(
                    type="stream",
                    mentioned=True,
                    topic="test",
                    unmuted_stream_msg=True,
                    stream_id=stream.id,
                ),
            )

        for message_id in messages_to_unread:
            um = UserMessage.objects.get(
                user_profile_id=receiver.id,
                message_id=message_id,
            )
            self.assertFalse(um.flags.read)
        for message_id in messages_still_read:
            um = UserMessage.objects.get(
                user_profile_id=receiver.id,
                message_id=message_id,
            )
            self.assertTrue(um.flags.read)
|
|
|
|
|
2022-11-02 18:06:21 +01:00
|
|
|
    def test_unsubscribed_stream_messages_unread(self) -> None:
        """An extended test verifying that the `update_message_flags` endpoint
        correctly preserves the invariant that messages cannot be
        marked unread in streams a user is not currently subscribed
        to.
        """
        sender = self.example_user("cordelia")
        receiver = self.example_user("hamlet")
        stream_name = "Test stream"
        topic_name = "test"
        self.subscribe(sender, stream_name)
        # Sent before the receiver subscribes: no UserMessage rows for them.
        before_subscribe_stream_message_ids = [
            self.send_stream_message(
                sender=sender,
                stream_name=stream_name,
                topic_name=topic_name,
            )
            for i in range(2)
        ]

        self.subscribe(receiver, stream_name)
        # Sent while subscribed: normal unread UserMessage rows exist.
        subscribed_stream_message_ids = [
            self.send_stream_message(
                sender=sender,
                stream_name=stream_name,
                topic_name=topic_name,
            )
            for i in range(2)
        ]
        # Verona: a stream the receiver is subscribed to by default,
        # which we will unsubscribe from below.
        stream_name = "Verona"
        sub = get_subscription(stream_name, receiver)
        self.assertTrue(sub.active)
        unsubscribed_stream_message_ids = [
            self.send_stream_message(
                sender=sender,
                stream_name=stream_name,
                topic_name=topic_name,
            )
            for i in range(2)
        ]
        # Unsubscribing generates an event in the deferred_work queue
        # that marks the above messages as read.
        self.unsubscribe(receiver, stream_name)
        after_unsubscribe_stream_message_ids = [
            self.send_stream_message(
                sender=sender,
                stream_name=stream_name,
                topic_name=topic_name,
            )
            for i in range(2)
        ]

        # A stream the receiver has never been subscribed to at all.
        stream_name = "New-stream"
        self.subscribe(sender, stream_name)
        never_subscribed_stream_message_ids = [
            self.send_stream_message(
                sender=sender,
                stream_name=stream_name,
                topic_name=topic_name,
            )
            for i in range(2)
        ]

        message_ids = (
            subscribed_stream_message_ids
            + unsubscribed_stream_message_ids
            + after_unsubscribe_stream_message_ids
            + never_subscribed_stream_message_ids
        )
        # Before doing anything, verify the state of each message's flags.
        for message_id in subscribed_stream_message_ids + unsubscribed_stream_message_ids:
            um = UserMessage.objects.get(
                user_profile_id=receiver.id,
                message_id=message_id,
            )
            self.assertEqual(um.flags.read, message_id in unsubscribed_stream_message_ids)
        for message_id in (
            before_subscribe_stream_message_ids
            + never_subscribed_stream_message_ids
            + after_unsubscribe_stream_message_ids
        ):
            self.assertFalse(
                UserMessage.objects.filter(
                    user_profile_id=receiver.id,
                    message_id=message_id,
                ).exists()
            )

        # First, try marking them all as unread; should be a noop. The
        # ones that already have UserMessage rows are already unread,
        # and the others don't have UserMessage rows and cannot be
        # marked as unread without first subscribing.
        events: List[Mapping[str, Any]] = []
        # NOTE(review): `result` is not checked with assert_json_success
        # here — confirm whether that's deliberate.
        with self.tornado_redirected_to_list(events, expected_num_events=0):
            result = self.client_post(
                "/json/messages/flags",
                {"messages": orjson.dumps(message_ids).decode(), "op": "remove", "flag": "read"},
            )
        for message_id in subscribed_stream_message_ids + unsubscribed_stream_message_ids:
            um = UserMessage.objects.get(
                user_profile_id=receiver.id,
                message_id=message_id,
            )
            self.assertEqual(um.flags.read, message_id in unsubscribed_stream_message_ids)
        for message_id in (
            never_subscribed_stream_message_ids + after_unsubscribe_stream_message_ids
        ):
            self.assertFalse(
                UserMessage.objects.filter(
                    user_profile_id=receiver.id,
                    message_id=message_id,
                ).exists()
            )

        # Now, explicitly mark them all as read. The messages which don't
        # have UserMessage rows will be ignored.
        message_ids = before_subscribe_stream_message_ids + message_ids
        self.login("hamlet")
        with self.tornado_redirected_to_list(events, expected_num_events=1):
            result = self.client_post(
                "/json/messages/flags",
                {"messages": orjson.dumps(message_ids).decode(), "op": "add", "flag": "read"},
            )
        self.assert_json_success(result)
        event = events[0]["event"]
        # Only the messages with (unread) UserMessage rows were affected.
        self.assertEqual(event["messages"], subscribed_stream_message_ids)

        for message_id in subscribed_stream_message_ids + unsubscribed_stream_message_ids:
            um = UserMessage.objects.get(
                user_profile_id=receiver.id,
                message_id=message_id,
            )
            self.assertTrue(um.flags.read)
        for message_id in (
            before_subscribe_stream_message_ids
            + never_subscribed_stream_message_ids
            + after_unsubscribe_stream_message_ids
        ):
            self.assertFalse(
                UserMessage.objects.filter(
                    user_profile_id=receiver.id,
                    message_id=message_id,
                ).exists()
            )

        # Now, request marking them all as unread. Since we haven't
        # resubscribed to any of the streams, we expect this to not
        # modify the messages in streams we're not subscribed to.
        #
        # This also create new 'historical' UserMessage rows for the
        # messages in subscribed streams that didn't have them
        # previously.
        with self.tornado_redirected_to_list(events, expected_num_events=1):
            result = self.client_post(
                "/json/messages/flags",
                {"messages": orjson.dumps(message_ids).decode(), "op": "remove", "flag": "read"},
            )
        event = events[0]["event"]
        self.assertEqual(
            event["messages"], before_subscribe_stream_message_ids + subscribed_stream_message_ids
        )
        unread_message_ids = {
            str(message_id)
            for message_id in before_subscribe_stream_message_ids + subscribed_stream_message_ids
        }
        self.assertSetEqual(set(event["message_details"].keys()), unread_message_ids)

        # Messages in the still-subscribed stream are now unread...
        for message_id in before_subscribe_stream_message_ids + subscribed_stream_message_ids:
            um = UserMessage.objects.get(
                user_profile_id=receiver.id,
                message_id=message_id,
            )
            self.assertFalse(um.flags.read)

        # ...messages read via the unsubscribe deferred-work stay read...
        for message_id in unsubscribed_stream_message_ids:
            um = UserMessage.objects.get(
                user_profile_id=receiver.id,
                message_id=message_id,
            )
            self.assertTrue(um.flags.read)

        # ...and the rest still have no UserMessage rows at all.
        for message_id in (
            after_unsubscribe_stream_message_ids + never_subscribed_stream_message_ids
        ):
            self.assertFalse(
                UserMessage.objects.filter(
                    user_profile_id=receiver.id,
                    message_id=message_id,
                ).exists()
            )
|
|
|
|
|
2021-06-09 13:31:39 +02:00
|
|
|
    def test_pm_messages_unread(self) -> None:
        """Marking private messages unread flips only the requested messages
        and emits message_details entries of type "private" listing the
        other participant.
        """
        sender = self.example_user("cordelia")
        receiver = self.example_user("hamlet")
        message_ids = [
            self.send_personal_message(sender, receiver, content="Hello") for i in range(4)
        ]
        self.login("hamlet")
        # PMs start out unread for the recipient.
        for message_id in message_ids:
            um = UserMessage.objects.get(
                user_profile_id=receiver.id,
                message_id=message_id,
            )
            self.assertFalse(um.flags.read)
        result = self.client_post(
            "/json/messages/flags",
            {"messages": orjson.dumps(message_ids).decode(), "op": "add", "flag": "read"},
        )
        self.assert_json_success(result)
        for message_id in message_ids:
            um = UserMessage.objects.get(
                user_profile_id=receiver.id,
                message_id=message_id,
            )
            self.assertTrue(um.flags.read)
        messages_to_unread = message_ids[2:]
        messages_still_read = message_ids[:2]

        params = {
            "messages": orjson.dumps(messages_to_unread).decode(),
            "op": "remove",
            "flag": "read",
        }

        events: List[Mapping[str, Any]] = []

        # Use the tornado_redirected_to_list context manager to capture
        # events.
        with self.tornado_redirected_to_list(events, expected_num_events=1):
            result = self.api_post(receiver, "/api/v1/messages/flags", params)

        self.assert_json_success(result)
        event = events[0]["event"]
        self.assertEqual(event["messages"], messages_to_unread)
        unread_message_ids = {str(message_id) for message_id in messages_to_unread}
        self.assertSetEqual(set(event["message_details"].keys()), unread_message_ids)
        for message_id in event["message_details"]:
            self.assertEqual(
                event["message_details"][message_id],
                dict(
                    type="private",
                    user_ids=[sender.id],
                ),
            )

        for message_id in messages_to_unread:
            um = UserMessage.objects.get(
                user_profile_id=receiver.id,
                message_id=message_id,
            )
            self.assertFalse(um.flags.read)
        for message_id in messages_still_read:
            um = UserMessage.objects.get(
                user_profile_id=receiver.id,
                message_id=message_id,
            )
            self.assertTrue(um.flags.read)
|
|
|
|
|
|
|
|
    def test_pm_messages_unread_mention(self) -> None:
        """Same as test_pm_messages_unread, but the PMs mention the receiver:
        the message_details entries must additionally carry mentioned=True.
        """
        sender = self.example_user("cordelia")
        receiver = self.example_user("hamlet")
        # NOTE(review): this stream subscription looks vestigial for a PM
        # test (likely copied from the stream variant) — confirm whether
        # it's needed.
        stream_name = "Denmark"
        self.subscribe(receiver, stream_name)
        message_ids = [
            self.send_personal_message(sender, receiver, content="@**King Hamlet**")
            for i in range(4)
        ]
        self.login("hamlet")
        for message_id in message_ids:
            um = UserMessage.objects.get(
                user_profile_id=receiver.id,
                message_id=message_id,
            )
            self.assertFalse(um.flags.read)
        result = self.client_post(
            "/json/messages/flags",
            {"messages": orjson.dumps(message_ids).decode(), "op": "add", "flag": "read"},
        )
        self.assert_json_success(result)
        for message_id in message_ids:
            um = UserMessage.objects.get(
                user_profile_id=receiver.id,
                message_id=message_id,
            )
            self.assertTrue(um.flags.read)
        messages_to_unread = message_ids[2:]
        messages_still_read = message_ids[:2]

        params = {
            "messages": orjson.dumps(messages_to_unread).decode(),
            "op": "remove",
            "flag": "read",
        }

        events: List[Mapping[str, Any]] = []

        # Use the tornado_redirected_to_list context manager to capture
        # events.
        with self.tornado_redirected_to_list(events, expected_num_events=1):
            result = self.api_post(receiver, "/api/v1/messages/flags", params)

        self.assert_json_success(result)
        event = events[0]["event"]
        self.assertEqual(event["messages"], messages_to_unread)
        unread_message_ids = {str(message_id) for message_id in messages_to_unread}
        self.assertSetEqual(set(event["message_details"].keys()), unread_message_ids)
        for message_id in event["message_details"]:
            self.assertEqual(
                event["message_details"][message_id],
                dict(
                    type="private",
                    user_ids=[sender.id],
                    mentioned=True,
                ),
            )

        for message_id in messages_to_unread:
            um = UserMessage.objects.get(
                user_profile_id=receiver.id,
                message_id=message_id,
            )
            self.assertFalse(um.flags.read)
        for message_id in messages_still_read:
            um = UserMessage.objects.get(
                user_profile_id=receiver.id,
                message_id=message_id,
            )
            self.assertTrue(um.flags.read)
|
|
|
|
|
|
|
|
def test_huddle_messages_unread(self) -> None:
|
|
|
|
sender = self.example_user("cordelia")
|
|
|
|
receiver = self.example_user("hamlet")
|
|
|
|
user1 = self.example_user("othello")
|
|
|
|
message_ids = [
|
|
|
|
# self.send_huddle_message(sender, receiver, content="Hello") for i in range(4)
|
|
|
|
self.send_huddle_message(sender, [receiver, user1])
|
|
|
|
for i in range(4)
|
|
|
|
]
|
|
|
|
self.login("hamlet")
|
|
|
|
for message_id in message_ids:
|
|
|
|
um = UserMessage.objects.get(
|
|
|
|
user_profile_id=receiver.id,
|
|
|
|
message_id=message_id,
|
|
|
|
)
|
|
|
|
self.assertFalse(um.flags.read)
|
|
|
|
result = self.client_post(
|
|
|
|
"/json/messages/flags",
|
|
|
|
{"messages": orjson.dumps(message_ids).decode(), "op": "add", "flag": "read"},
|
|
|
|
)
|
|
|
|
self.assert_json_success(result)
|
|
|
|
for message_id in message_ids:
|
|
|
|
um = UserMessage.objects.get(
|
|
|
|
user_profile_id=receiver.id,
|
|
|
|
message_id=message_id,
|
|
|
|
)
|
|
|
|
self.assertTrue(um.flags.read)
|
|
|
|
messages_to_unread = message_ids[2:]
|
|
|
|
messages_still_read = message_ids[:2]
|
|
|
|
|
|
|
|
params = {
|
|
|
|
"messages": orjson.dumps(messages_to_unread).decode(),
|
|
|
|
"op": "remove",
|
|
|
|
"flag": "read",
|
|
|
|
}
|
|
|
|
|
|
|
|
events: List[Mapping[str, Any]] = []
|
|
|
|
|
|
|
|
# Use the tornado_redirected_to_list context manager to capture
|
|
|
|
# events.
|
|
|
|
with self.tornado_redirected_to_list(events, expected_num_events=1):
|
|
|
|
result = self.api_post(receiver, "/api/v1/messages/flags", params)
|
|
|
|
|
|
|
|
self.assert_json_success(result)
|
|
|
|
event = events[0]["event"]
|
|
|
|
self.assertEqual(event["messages"], messages_to_unread)
|
2022-03-25 03:13:54 +01:00
|
|
|
unread_message_ids = {str(message_id) for message_id in messages_to_unread}
|
2021-06-09 13:31:39 +02:00
|
|
|
self.assertSetEqual(set(event["message_details"].keys()), unread_message_ids)
|
|
|
|
for message_id in event["message_details"]:
|
2022-07-19 22:37:30 +02:00
|
|
|
self.assertNotIn("mentioned", event["message_details"][message_id])
|
2021-06-09 13:31:39 +02:00
|
|
|
|
|
|
|
for message_id in messages_to_unread:
|
|
|
|
um = UserMessage.objects.get(
|
|
|
|
user_profile_id=receiver.id,
|
|
|
|
message_id=message_id,
|
|
|
|
)
|
|
|
|
self.assertFalse(um.flags.read)
|
|
|
|
for message_id in messages_still_read:
|
|
|
|
um = UserMessage.objects.get(
|
|
|
|
user_profile_id=receiver.id,
|
|
|
|
message_id=message_id,
|
|
|
|
)
|
|
|
|
self.assertTrue(um.flags.read)
|
|
|
|
|
|
|
|
def test_huddle_messages_unread_mention(self) -> None:
|
|
|
|
sender = self.example_user("cordelia")
|
|
|
|
receiver = self.example_user("hamlet")
|
|
|
|
user1 = self.example_user("othello")
|
|
|
|
message_ids = [
|
|
|
|
# self.send_huddle_message(sender, receiver, content="Hello") for i in range(4)
|
|
|
|
self.send_huddle_message(
|
|
|
|
from_user=sender, to_users=[receiver, user1], content="@**King Hamlet**"
|
|
|
|
)
|
|
|
|
for i in range(4)
|
|
|
|
]
|
|
|
|
self.login("hamlet")
|
|
|
|
for message_id in message_ids:
|
|
|
|
um = UserMessage.objects.get(
|
|
|
|
user_profile_id=receiver.id,
|
|
|
|
message_id=message_id,
|
|
|
|
)
|
|
|
|
self.assertFalse(um.flags.read)
|
|
|
|
result = self.client_post(
|
|
|
|
"/json/messages/flags",
|
|
|
|
{"messages": orjson.dumps(message_ids).decode(), "op": "add", "flag": "read"},
|
|
|
|
)
|
|
|
|
self.assert_json_success(result)
|
|
|
|
for message_id in message_ids:
|
|
|
|
um = UserMessage.objects.get(
|
|
|
|
user_profile_id=receiver.id,
|
|
|
|
message_id=message_id,
|
|
|
|
)
|
|
|
|
self.assertTrue(um.flags.read)
|
|
|
|
messages_to_unread = message_ids[2:]
|
|
|
|
messages_still_read = message_ids[:2]
|
|
|
|
|
|
|
|
params = {
|
|
|
|
"messages": orjson.dumps(messages_to_unread).decode(),
|
|
|
|
"op": "remove",
|
|
|
|
"flag": "read",
|
|
|
|
}
|
|
|
|
|
|
|
|
events: List[Mapping[str, Any]] = []
|
|
|
|
|
|
|
|
# Use the tornado_redirected_to_list context manager to capture
|
|
|
|
# events.
|
|
|
|
with self.tornado_redirected_to_list(events, expected_num_events=1):
|
|
|
|
result = self.api_post(receiver, "/api/v1/messages/flags", params)
|
|
|
|
|
|
|
|
self.assert_json_success(result)
|
|
|
|
event = events[0]["event"]
|
|
|
|
self.assertEqual(event["messages"], messages_to_unread)
|
2022-03-25 03:13:54 +01:00
|
|
|
unread_message_ids = {str(message_id) for message_id in messages_to_unread}
|
2021-06-09 13:31:39 +02:00
|
|
|
self.assertSetEqual(set(event["message_details"].keys()), unread_message_ids)
|
|
|
|
for message_id in event["message_details"]:
|
2022-07-19 22:37:30 +02:00
|
|
|
self.assertEqual(event["message_details"][message_id]["mentioned"], True)
|
2021-06-09 13:31:39 +02:00
|
|
|
|
|
|
|
for message_id in messages_to_unread:
|
|
|
|
um = UserMessage.objects.get(
|
|
|
|
user_profile_id=receiver.id,
|
|
|
|
message_id=message_id,
|
|
|
|
)
|
|
|
|
self.assertFalse(um.flags.read)
|
|
|
|
for message_id in messages_still_read:
|
|
|
|
um = UserMessage.objects.get(
|
|
|
|
user_profile_id=receiver.id,
|
|
|
|
message_id=message_id,
|
|
|
|
)
|
|
|
|
self.assertTrue(um.flags.read)
|