from typing import Any, List, Mapping, Set
from unittest import mock

import ujson
from django.db import connection

from zerver.lib.fix_unreads import fix, fix_unsubscribed
from zerver.lib.message import (
    MessageDict,
    UnreadMessagesResult,
    aggregate_unread_data,
    apply_unread_message_event,
    get_raw_unread_data,
)
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import get_subscription, tornado_redirected_to_list
from zerver.lib.topic_mutes import add_topic_mute
from zerver.models import (
    Message,
    Recipient,
    Stream,
    Subscription,
    UserMessage,
    UserProfile,
    get_realm,
    get_stream,
)


class FirstUnreadAnchorTests(ZulipTestCase):
    '''
    HISTORICAL NOTE:

    The two tests in this class were originally written when
    we had the concept of a "pointer", and they may be a bit
    redundant in what they now check.
    '''

    def test_use_first_unread_anchor(self) -> None:
        self.login('hamlet')

        # Mark all existing messages as read
        result = self.client_post("/json/mark_all_as_read")
        self.assert_json_success(result)

        # Send a new message (this will be unread)
        new_message_id = self.send_stream_message(self.example_user("othello"), "Verona",
                                                  "test")

        # If we call get_messages with use_first_unread_anchor=True, we
        # should get the message we just sent
        messages_response = self.get_messages_response(
            anchor="first_unread", num_before=0, num_after=1)
        self.assertEqual(messages_response['messages'][0]['id'], new_message_id)
        self.assertEqual(messages_response['anchor'], new_message_id)

        # Test with the old way of expressing use_first_unread_anchor=True
        messages_response = self.get_messages_response(
            anchor=0, num_before=0, num_after=1, use_first_unread_anchor=True)
        self.assertEqual(messages_response['messages'][0]['id'], new_message_id)
        self.assertEqual(messages_response['anchor'], new_message_id)

        # We want to get the message_id of an arbitrary old message. We can
        # call get_messages with use_first_unread_anchor=False and simply
        # save the first message we're returned.
        messages = self.get_messages(
            anchor=0, num_before=0, num_after=2, use_first_unread_anchor=False)
        old_message_id = messages[0]['id']

        # Verify the message is marked as read
        user_message = UserMessage.objects.get(
            message_id=old_message_id,
            user_profile=self.example_user('hamlet'))
        self.assertTrue(user_message.flags.read)

        # Let's set this old message to be unread
        result = self.client_post("/json/messages/flags",
                                  {"messages": ujson.dumps([old_message_id]),
                                   "op": "remove",
                                   "flag": "read"})

        # Verify it's now marked as unread
        user_message = UserMessage.objects.get(
            message_id=old_message_id,
            user_profile=self.example_user('hamlet'))
        self.assert_json_success(result)
        self.assertFalse(user_message.flags.read)

        # Now if we call get_messages with use_first_unread_anchor=True,
        # we should get the old message we just set to unread
        messages_response = self.get_messages_response(
            anchor="first_unread", num_before=0, num_after=1)
        self.assertEqual(messages_response['messages'][0]['id'], old_message_id)
        self.assertEqual(messages_response['anchor'], old_message_id)

    def test_visible_messages_use_first_unread_anchor(self) -> None:
        self.login('hamlet')

        result = self.client_post("/json/mark_all_as_read")
        self.assert_json_success(result)

        new_message_id = self.send_stream_message(self.example_user("othello"), "Verona",
                                                  "test")

        messages_response = self.get_messages_response(
            anchor="first_unread", num_before=0, num_after=1)
        self.assertEqual(messages_response['messages'][0]['id'], new_message_id)
        self.assertEqual(messages_response['anchor'], new_message_id)

        with mock.patch('zerver.views.message_fetch.get_first_visible_message_id',
                        return_value=new_message_id):
            messages_response = self.get_messages_response(
                anchor="first_unread", num_before=0, num_after=1)
            self.assertEqual(messages_response['messages'][0]['id'], new_message_id)
            self.assertEqual(messages_response['anchor'], new_message_id)

        with mock.patch('zerver.views.message_fetch.get_first_visible_message_id',
                        return_value=new_message_id + 1):
            messages_response = self.get_messages_response(
                anchor="first_unread", num_before=0, num_after=1)
            self.assert_length(messages_response['messages'], 0)
            self.assertIn('anchor', messages_response)

        with mock.patch('zerver.views.message_fetch.get_first_visible_message_id',
                        return_value=new_message_id - 1):
            messages = self.get_messages(
                anchor="first_unread", num_before=0, num_after=1)
            self.assert_length(messages, 1)


class UnreadCountTests(ZulipTestCase):
    def setUp(self) -> None:
        super().setUp()
        with mock.patch('zerver.lib.push_notifications.push_notifications_enabled',
                        return_value=True) as mock_push_notifications_enabled:
            self.unread_msg_ids = [
                self.send_personal_message(
                    self.example_user("iago"), self.example_user("hamlet"), "hello"),
                self.send_personal_message(
                    self.example_user("iago"), self.example_user("hamlet"), "hello2")]
            mock_push_notifications_enabled.assert_called()

    # Sending a new message results in unread UserMessages being created
    def test_new_message(self) -> None:
        self.login('hamlet')
        content = "Test message for unset read bit"
        last_msg = self.send_stream_message(self.example_user("hamlet"), "Verona", content)
        user_messages = list(UserMessage.objects.filter(message=last_msg))
        self.assertGreater(len(user_messages), 0)
        for um in user_messages:
            self.assertEqual(um.message.content, content)
            if um.user_profile.email != self.example_email("hamlet"):
                self.assertFalse(um.flags.read)

    def test_update_flags(self) -> None:
        self.login('hamlet')

        result = self.client_post("/json/messages/flags",
                                  {"messages": ujson.dumps(self.unread_msg_ids),
                                   "op": "add",
                                   "flag": "read"})
        self.assert_json_success(result)

        # Ensure we properly set the flags
        found = 0
        for msg in self.get_messages():
            if msg['id'] in self.unread_msg_ids:
                self.assertEqual(msg['flags'], ['read'])
                found += 1
        self.assertEqual(found, 2)

        result = self.client_post("/json/messages/flags",
                                  {"messages": ujson.dumps([self.unread_msg_ids[1]]),
                                   "op": "remove", "flag": "read"})
        self.assert_json_success(result)

        # Ensure we properly remove just one flag
        for msg in self.get_messages():
            if msg['id'] == self.unread_msg_ids[0]:
                self.assertEqual(msg['flags'], ['read'])
            elif msg['id'] == self.unread_msg_ids[1]:
                self.assertEqual(msg['flags'], [])
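
    # Marking a whole stream as read should flip the read flag only for the
    # requesting user's messages in that stream and notify clients with a
    # single update_message_flags event; messages in other streams stay unread.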
    def test_mark_all_in_stream_read(self) -> None:
        self.login('hamlet')
        user_profile = self.example_user('hamlet')
        stream = self.subscribe(user_profile, "test_stream")
        self.subscribe(self.example_user("cordelia"), "test_stream")

        message_id = self.send_stream_message(self.example_user("hamlet"), "test_stream", "hello")
        unrelated_message_id = self.send_stream_message(self.example_user("hamlet"), "Denmark", "hello")

        events: List[Mapping[str, Any]] = []
        with tornado_redirected_to_list(events):
            result = self.client_post("/json/mark_stream_as_read", {
                "stream_id": stream.id,
            })

        self.assert_json_success(result)
        self.assertTrue(len(events) == 1)

        event = events[0]['event']
        expected = dict(operation='add',
                        messages=[message_id],
                        flag='read',
                        type='update_message_flags',
                        all=False)

        differences = [key for key in expected if expected[key] != event[key]]
        self.assertTrue(len(differences) == 0)

        hamlet = self.example_user('hamlet')
        um = list(UserMessage.objects.filter(message=message_id))
        for msg in um:
            if msg.user_profile.email == hamlet.email:
                self.assertTrue(msg.flags.read)
            else:
                self.assertFalse(msg.flags.read)

        unrelated_messages = list(UserMessage.objects.filter(message=unrelated_message_id))
        for msg in unrelated_messages:
            if msg.user_profile.email == hamlet.email:
                self.assertFalse(msg.flags.read)

    def test_mark_all_in_invalid_stream_read(self) -> None:
        self.login('hamlet')
        invalid_stream_id = "12345678"
        result = self.client_post("/json/mark_stream_as_read", {
            "stream_id": invalid_stream_id,
        })
        self.assert_json_error(result, 'Invalid stream id')

    def test_mark_all_topics_unread_with_invalid_stream_name(self) -> None:
        self.login('hamlet')
        invalid_stream_id = "12345678"
        result = self.client_post("/json/mark_topic_as_read", {
            "stream_id": invalid_stream_id,
            'topic_name': 'whatever',
        })
        self.assert_json_error(result, "Invalid stream id")
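
    # Same flow as test_mark_all_in_stream_read above, but scoped to a single
    # topic via the mark_topic_as_read endpoint.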
    def test_mark_all_in_stream_topic_read(self) -> None:
        self.login('hamlet')
        user_profile = self.example_user('hamlet')
        self.subscribe(user_profile, "test_stream")

        message_id = self.send_stream_message(self.example_user("hamlet"), "test_stream", "hello", "test_topic")
        unrelated_message_id = self.send_stream_message(self.example_user("hamlet"), "Denmark", "hello", "Denmark2")

        events: List[Mapping[str, Any]] = []
        with tornado_redirected_to_list(events):
            result = self.client_post("/json/mark_topic_as_read", {
                "stream_id": get_stream("test_stream", user_profile.realm).id,
                "topic_name": "test_topic",
            })

        self.assert_json_success(result)
        self.assertTrue(len(events) == 1)

        event = events[0]['event']
        expected = dict(operation='add',
                        messages=[message_id],
                        flag='read',
                        type='update_message_flags',
                        all=False)

        differences = [key for key in expected if expected[key] != event[key]]
        self.assertTrue(len(differences) == 0)

        um = list(UserMessage.objects.filter(message=message_id))
        for msg in um:
            if msg.user_profile_id == user_profile.id:
                self.assertTrue(msg.flags.read)

        unrelated_messages = list(UserMessage.objects.filter(message=unrelated_message_id))
        for msg in unrelated_messages:
            if msg.user_profile_id == user_profile.id:
                self.assertFalse(msg.flags.read)

    def test_mark_all_in_invalid_topic_read(self) -> None:
        self.login('hamlet')
        invalid_topic_name = "abc"
        result = self.client_post("/json/mark_topic_as_read", {
            "stream_id": get_stream("Denmark", get_realm("zulip")).id,
            "topic_name": invalid_topic_name,
        })
        self.assert_json_error(result, "No such topic 'abc'")


class FixUnreadTests(ZulipTestCase):
    def test_fix_unreads(self) -> None:
        user = self.example_user('hamlet')
        realm = get_realm('zulip')

        def send_message(stream_name: str, topic_name: str) -> int:
            msg_id = self.send_stream_message(
                self.example_user("othello"),
                stream_name,
                topic_name=topic_name)
            um = UserMessage.objects.get(
                user_profile=user,
                message_id=msg_id)
            return um.id

        def assert_read(user_message_id: int) -> None:
            um = UserMessage.objects.get(id=user_message_id)
            self.assertTrue(um.flags.read)

        def assert_unread(user_message_id: int) -> None:
            um = UserMessage.objects.get(id=user_message_id)
            self.assertFalse(um.flags.read)

        def mute_stream(stream_name: str) -> None:
            stream = get_stream(stream_name, realm)
            recipient = stream.recipient
            subscription = Subscription.objects.get(
                user_profile=user,
                recipient=recipient,
            )
            subscription.is_muted = True
            subscription.save()

        def mute_topic(stream_name: str, topic_name: str) -> None:
            stream = get_stream(stream_name, realm)
            recipient = stream.recipient

            add_topic_mute(
                user_profile=user,
                stream_id=stream.id,
                recipient_id=recipient.id,
                topic_name=topic_name,
            )

        def force_unsubscribe(stream_name: str) -> None:
            '''
            We don't want side effects here, since the eventual
            unsubscribe path may mark messages as read, defeating
            the test setup here.
            '''
            sub = get_subscription(stream_name, user)
            sub.active = False
            sub.save()

        # The data setup here is kind of funny, because some of these
        # conditions should not actually happen in practice going forward,
        # but we may have had bad data from the past.

        mute_stream('Denmark')
        mute_topic('Verona', 'muted_topic')

        um_normal_id = send_message('Verona', 'normal')
        um_muted_topic_id = send_message('Verona', 'muted_topic')
        um_muted_stream_id = send_message('Denmark', 'whatever')

        self.subscribe(user, 'temporary')
        um_unsubscribed_id = send_message('temporary', 'whatever')
        force_unsubscribe('temporary')

        # Verify the setup
        assert_unread(um_normal_id)
        assert_unread(um_muted_topic_id)
        assert_unread(um_muted_stream_id)
        assert_unread(um_unsubscribed_id)

        # fix unsubscribed
        with connection.cursor() as cursor:
            fix_unsubscribed(cursor, user)

        # Muted messages don't change.
        assert_unread(um_muted_topic_id)
        assert_unread(um_muted_stream_id)
        assert_unread(um_normal_id)

        # The unsubscribed entry should change.
        assert_read(um_unsubscribed_id)

        # test idempotency
        fix(user)

        assert_unread(um_normal_id)
        assert_unread(um_muted_topic_id)
        assert_unread(um_muted_stream_id)
        assert_read(um_unsubscribed_id)


class PushNotificationMarkReadFlowsTest(ZulipTestCase):
    def get_mobile_push_notification_ids(self, user_profile: UserProfile) -> List[int]:
        return list(UserMessage.objects.filter(
            user_profile=user_profile,
        ).extra(
            where=[UserMessage.where_active_push_notification()],
        ).order_by("message_id").values_list("message_id", flat=True))

    @mock.patch('zerver.lib.push_notifications.push_notifications_enabled', return_value=True)
    def test_track_active_mobile_push_notifications(self, mock_push_notifications: mock.MagicMock) -> None:
        mock_push_notifications.return_value = True
        self.login('hamlet')
        user_profile = self.example_user('hamlet')
        stream = self.subscribe(user_profile, "test_stream")
        second_stream = self.subscribe(user_profile, "second_stream")

        property_name = "push_notifications"
        result = self.api_post(user_profile, "/api/v1/users/me/subscriptions/properties",
                               {"subscription_data": ujson.dumps([{"property": property_name,
                                                                   "value": True,
                                                                   "stream_id": stream.id}])})
        result = self.api_post(user_profile, "/api/v1/users/me/subscriptions/properties",
                               {"subscription_data": ujson.dumps([{"property": property_name,
                                                                   "value": True,
                                                                   "stream_id": second_stream.id}])})
        self.assert_json_success(result)
        self.assertEqual(self.get_mobile_push_notification_ids(user_profile), [])

        message_id = self.send_stream_message(self.example_user("cordelia"), "test_stream", "hello", "test_topic")
        second_message_id = self.send_stream_message(self.example_user("cordelia"), "test_stream", "hello", "other_topic")
        third_message_id = self.send_stream_message(self.example_user("cordelia"), "second_stream", "hello", "test_topic")

        self.assertEqual(self.get_mobile_push_notification_ids(user_profile),
                         [message_id, second_message_id, third_message_id])

        result = self.client_post("/json/mark_topic_as_read", {
            "stream_id": str(stream.id),
            "topic_name": "test_topic",
        })

        self.assert_json_success(result)
        self.assertEqual(self.get_mobile_push_notification_ids(user_profile),
                         [second_message_id, third_message_id])

        result = self.client_post("/json/mark_stream_as_read", {
            "stream_id": str(stream.id),
            "topic_name": "test_topic",
        })
        self.assertEqual(self.get_mobile_push_notification_ids(user_profile),
                         [third_message_id])

        fourth_message_id = self.send_stream_message(self.example_user("cordelia"), "test_stream", "hello", "test_topic")
        self.assertEqual(self.get_mobile_push_notification_ids(user_profile),
                         [third_message_id, fourth_message_id])

        result = self.client_post("/json/mark_all_as_read", {})
        self.assertEqual(self.get_mobile_push_notification_ids(user_profile),
                         [])
        mock_push_notifications.assert_called()


class GetUnreadMsgsTest(ZulipTestCase):
    def mute_stream(self, user_profile: UserProfile, stream: Stream) -> None:
        recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
        subscription = Subscription.objects.get(
            user_profile=user_profile,
            recipient=recipient,
        )
        subscription.is_muted = True
        subscription.save()

    def mute_topic(self, user_profile: UserProfile, stream_name: str,
                   topic_name: str) -> None:
        realm = user_profile.realm
        stream = get_stream(stream_name, realm)
        recipient = stream.recipient

        add_topic_mute(
            user_profile=user_profile,
            stream_id=stream.id,
            recipient_id=recipient.id,
            topic_name=topic_name,
        )
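
    # The tests below exercise get_raw_unread_data directly, checking the
    # per-message data it collects for stream, huddle, and one-on-one PM
    # messages.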
    def test_raw_unread_stream(self) -> None:
        cordelia = self.example_user('cordelia')
        hamlet = self.example_user('hamlet')
        realm = hamlet.realm

        for stream_name in ['social', 'devel', 'test here']:
            self.subscribe(hamlet, stream_name)
            self.subscribe(cordelia, stream_name)

        all_message_ids: Set[int] = set()
        message_ids = dict()

        tups = [
            ('social', 'lunch'),
            ('test here', 'bla'),
            ('devel', 'python'),
            ('devel', 'ruby'),
        ]

        for stream_name, topic_name in tups:
            message_ids[topic_name] = [
                self.send_stream_message(
                    sender=cordelia,
                    stream_name=stream_name,
                    topic_name=topic_name,
                ) for i in range(3)
            ]
            all_message_ids |= set(message_ids[topic_name])

        self.assertEqual(len(all_message_ids), 12)  # sanity check on test setup

        self.mute_stream(
            user_profile=hamlet,
            stream=get_stream('test here', realm),
        )

        self.mute_topic(
            user_profile=hamlet,
            stream_name='devel',
            topic_name='ruby',
        )

        raw_unread_data = get_raw_unread_data(
            user_profile=hamlet,
        )

        stream_dict = raw_unread_data['stream_dict']

        self.assertEqual(
            set(stream_dict.keys()),
            all_message_ids,
        )

        self.assertEqual(
            raw_unread_data['unmuted_stream_msgs'],
            set(message_ids['python']) | set(message_ids['lunch']),
        )

        self.assertEqual(
            stream_dict[message_ids['lunch'][0]],
            dict(
                sender_id=cordelia.id,
                stream_id=get_stream('social', realm).id,
                topic='lunch',
            ),
        )

    def test_raw_unread_huddle(self) -> None:
        cordelia = self.example_user('cordelia')
        othello = self.example_user('othello')
        hamlet = self.example_user('hamlet')
        prospero = self.example_user('prospero')

        huddle1_message_ids = [
            self.send_huddle_message(
                cordelia,
                [hamlet, othello],
            )
            for i in range(3)
        ]

        huddle2_message_ids = [
            self.send_huddle_message(
                cordelia,
                [hamlet, prospero],
            )
            for i in range(3)
        ]

        raw_unread_data = get_raw_unread_data(
            user_profile=hamlet,
        )

        huddle_dict = raw_unread_data['huddle_dict']

        self.assertEqual(
            set(huddle_dict.keys()),
            set(huddle1_message_ids) | set(huddle2_message_ids),
        )

        huddle_string = ','.join(
            str(uid)
            for uid in sorted([cordelia.id, hamlet.id, othello.id])
        )

        self.assertEqual(
            huddle_dict[huddle1_message_ids[0]],
            dict(user_ids_string=huddle_string),
        )

    def test_raw_unread_personal(self) -> None:
        cordelia = self.example_user('cordelia')
        othello = self.example_user('othello')
        hamlet = self.example_user('hamlet')

        cordelia_pm_message_ids = [
            self.send_personal_message(cordelia, hamlet)
            for i in range(3)
        ]

        othello_pm_message_ids = [
            self.send_personal_message(othello, hamlet)
            for i in range(3)
        ]

        raw_unread_data = get_raw_unread_data(
            user_profile=hamlet,
        )

        pm_dict = raw_unread_data['pm_dict']

        self.assertEqual(
            set(pm_dict.keys()),
            set(cordelia_pm_message_ids) | set(othello_pm_message_ids),
        )

        self.assertEqual(
            pm_dict[cordelia_pm_message_ids[0]],
            dict(sender_id=cordelia.id),
        )

    def test_raw_unread_personal_from_self(self) -> None:
        hamlet = self.example_user('hamlet')

        def send_unread_pm(other_user: UserProfile) -> Message:
            # It is rare to send a message from Hamlet to Othello
            # (or any other user) and have it be unread for
            # Hamlet himself, but that is actually normal
            # behavior for most API clients.
            message_id = self.send_personal_message(
                from_user=hamlet,
                to_user=other_user,
                sending_client_name='some_api_program',
            )

            # Check our test setup is correct--the message should
            # not have looked like it was sent by a human.
            message = Message.objects.get(id=message_id)
            self.assertFalse(message.sent_by_human())

            # And since it was not sent by a human, it should not
            # be read, not even by the sender (Hamlet).
            um = UserMessage.objects.get(
                user_profile_id=hamlet.id,
                message_id=message_id,
            )
            self.assertFalse(um.flags.read)

            return message

        othello = self.example_user('othello')
        othello_msg = send_unread_pm(other_user=othello)

        # And now check the unread data structure...
        raw_unread_data = get_raw_unread_data(
            user_profile=hamlet,
        )

        pm_dict = raw_unread_data['pm_dict']

        self.assertEqual(set(pm_dict.keys()), {othello_msg.id})

        # For legacy reasons we call the field `sender_id` here,
        # but it really refers to the other user id in the conversation,
        # which is Othello.
        self.assertEqual(
            pm_dict[othello_msg.id],
            dict(sender_id=othello.id),
        )
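
        # apply_unread_message_event updates the existing raw_unread_data
        # structure in place as new unread messages arrive, so pm_dict should
        # grow to include each newly sent message below.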
        cordelia = self.example_user('cordelia')
        cordelia_msg = send_unread_pm(other_user=cordelia)

        apply_unread_message_event(
            user_profile=hamlet,
            state=raw_unread_data,
            message=MessageDict.wide_dict(cordelia_msg),
            flags=[],
        )
        self.assertEqual(
            set(pm_dict.keys()),
            {othello_msg.id, cordelia_msg.id},
        )

        # Again, `sender_id` is misnamed here.
        self.assertEqual(
            pm_dict[cordelia_msg.id],
            dict(sender_id=cordelia.id),
        )

        # Send a message to ourself.
        hamlet_msg = send_unread_pm(other_user=hamlet)
        apply_unread_message_event(
            user_profile=hamlet,
            state=raw_unread_data,
            message=MessageDict.wide_dict(hamlet_msg),
            flags=[],
        )
        self.assertEqual(
            set(pm_dict.keys()),
            {othello_msg.id, cordelia_msg.id, hamlet_msg.id},
        )

        # Again, `sender_id` is misnamed here.
        self.assertEqual(
            pm_dict[hamlet_msg.id],
            dict(sender_id=hamlet.id),
        )

        # Call get_raw_unread_data again.
        raw_unread_data = get_raw_unread_data(
            user_profile=hamlet,
        )
        pm_dict = raw_unread_data['pm_dict']

        self.assertEqual(
            set(pm_dict.keys()),
            {othello_msg.id, cordelia_msg.id, hamlet_msg.id},
        )

        # Again, `sender_id` is misnamed here.
        self.assertEqual(
            pm_dict[hamlet_msg.id],
            dict(sender_id=hamlet.id),
        )

    def test_unread_msgs(self) -> None:
        sender = self.example_user('cordelia')
        sender_id = sender.id
        user_profile = self.example_user('hamlet')
        othello = self.example_user('othello')

        pm1_message_id = self.send_personal_message(sender, user_profile, "hello1")
        pm2_message_id = self.send_personal_message(sender, user_profile, "hello2")

        muted_stream = self.subscribe(user_profile, 'Muted Stream')
        self.mute_stream(user_profile, muted_stream)
        self.mute_topic(user_profile, 'Denmark', 'muted-topic')

        stream_message_id = self.send_stream_message(sender, "Denmark", "hello")
        muted_stream_message_id = self.send_stream_message(sender, "Muted Stream", "hello")
        muted_topic_message_id = self.send_stream_message(
            sender,
            "Denmark",
            topic_name="muted-topic",
            content="hello",
        )

        huddle_message_id = self.send_huddle_message(
            sender,
            [user_profile, othello],
            'hello3',
        )

        def get_unread_data() -> UnreadMessagesResult:
            raw_unread_data = get_raw_unread_data(user_profile)
            aggregated_data = aggregate_unread_data(raw_unread_data)
            return aggregated_data

        result = get_unread_data()

        # The count here reflects the number of unread messages that we will
        # report to users in the bankruptcy dialog.  For now it excludes unread
        # messages from muted streams, but it doesn't yet exclude unread
        # messages from muted topics.
        self.assertEqual(result['count'], 4)
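
        # The aggregated result groups unread messages by conversation:
        # one-on-one PMs, stream/topic pairs, and huddles.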
        unread_pm = result['pms'][0]
        self.assertEqual(unread_pm['sender_id'], sender_id)
        self.assertEqual(unread_pm['unread_message_ids'], [pm1_message_id, pm2_message_id])
        self.assertTrue('sender_ids' not in unread_pm)

        unread_stream = result['streams'][0]
        self.assertEqual(unread_stream['stream_id'], get_stream('Denmark', user_profile.realm).id)
        self.assertEqual(unread_stream['topic'], 'muted-topic')
        self.assertEqual(unread_stream['unread_message_ids'], [muted_topic_message_id])
        self.assertEqual(unread_stream['sender_ids'], [sender_id])

        unread_stream = result['streams'][1]
        self.assertEqual(unread_stream['stream_id'], get_stream('Denmark', user_profile.realm).id)
        self.assertEqual(unread_stream['topic'], 'test')
        self.assertEqual(unread_stream['unread_message_ids'], [stream_message_id])
        self.assertEqual(unread_stream['sender_ids'], [sender_id])

        unread_stream = result['streams'][2]
        self.assertEqual(unread_stream['stream_id'], get_stream('Muted Stream', user_profile.realm).id)
        self.assertEqual(unread_stream['topic'], 'test')
        self.assertEqual(unread_stream['unread_message_ids'], [muted_stream_message_id])
        self.assertEqual(unread_stream['sender_ids'], [sender_id])

        huddle_string = ','.join(str(uid) for uid in sorted([sender_id, user_profile.id, othello.id]))

        unread_huddle = result['huddles'][0]
        self.assertEqual(unread_huddle['user_ids_string'], huddle_string)
        self.assertEqual(unread_huddle['unread_message_ids'], [huddle_message_id])
        self.assertTrue('sender_ids' not in unread_huddle)

        self.assertEqual(result['mentions'], [])
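
        # Flip the mention-related flags on an unmuted stream message and
        # check which combinations show up in result['mentions'].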
        um = UserMessage.objects.get(
            user_profile_id=user_profile.id,
            message_id=stream_message_id,
        )
        um.flags |= UserMessage.flags.mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [stream_message_id])

        um.flags = UserMessage.flags.has_alert_word
        um.save()
        result = get_unread_data()
        # TODO: This should change when we make alert words work better.
        self.assertEqual(result['mentions'], [])

        um.flags = UserMessage.flags.wildcard_mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [stream_message_id])

        um.flags = 0
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [])

        # Test with a muted stream
        um = UserMessage.objects.get(
            user_profile_id=user_profile.id,
            message_id=muted_stream_message_id,
        )
        um.flags = UserMessage.flags.mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [muted_stream_message_id])

        um.flags = UserMessage.flags.has_alert_word
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [])

        um.flags = UserMessage.flags.wildcard_mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [])

        um.flags = 0
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [])

        # Test with a muted topic
        um = UserMessage.objects.get(
            user_profile_id=user_profile.id,
            message_id=muted_topic_message_id,
        )
        um.flags = UserMessage.flags.mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [muted_topic_message_id])

        um.flags = UserMessage.flags.has_alert_word
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [])

        um.flags = UserMessage.flags.wildcard_mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [])

        um.flags = 0
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [])