import time
from datetime import datetime, timedelta, timezone
from unittest import mock

import time_machine
from django.test import override_settings
from django.utils.timezone import now as timezone_now

from confirmation.models import one_click_unsubscribe_link
from zerver.actions.create_user import do_create_user
from zerver.actions.realm_settings import do_set_realm_property
from zerver.actions.users import do_deactivate_user
from zerver.lib.digest import (
    DigestTopic,
    _enqueue_emails_for_realm,
    bulk_handle_digest_email,
    bulk_write_realm_audit_logs,
    enqueue_emails,
    gather_new_streams,
    get_hot_topics,
    get_recent_topics,
    get_recently_created_streams,
    get_user_stream_map,
)
from zerver.lib.message import get_last_message_id
from zerver.lib.streams import create_stream_if_needed
from zerver.lib.test_classes import ZulipTestCase
from zerver.models import Message, Realm, RealmAuditLog, Stream, UserActivityInterval, UserProfile
from zerver.models.realm_audit_logs import AuditLogEventType
from zerver.models.realms import get_realm
from zerver.models.streams import get_stream


class TestDigestEmailMessages(ZulipTestCase):
    @mock.patch("zerver.lib.digest.enough_traffic")
    @mock.patch("zerver.lib.digest.send_future_email")
    def test_multiple_stream_senders(
        self, mock_send_future_email: mock.MagicMock, mock_enough_traffic: mock.MagicMock
    ) -> None:
        othello = self.example_user("othello")
        self.subscribe(othello, "Verona")

        one_day_ago = timezone_now() - timedelta(days=1)
        Message.objects.all().update(date_sent=one_day_ago)
        one_hour_ago = timezone_now() - timedelta(seconds=3600)

        cutoff = time.mktime(one_hour_ago.timetuple())
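        # Note: time.mktime(...timetuple()) interprets the datetime in the
        # server's local timezone and returns epoch seconds as a float, which
        # is the cutoff form the digest helpers below accept.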

        senders = ["hamlet", "cordelia", "iago", "prospero", "ZOE"]
        self.simulate_stream_conversation("Verona", senders)

        # Remove RealmAuditLog rows, so we don't exclude polonius.
        RealmAuditLog.objects.all().delete()

        # When this test is run in isolation, one additional query is run which
        # is equivalent to
        # ContentType.objects.get(app_label='zerver', model='userprofile')
        # This code is run when we call `confirmation.models.create_confirmation_link`.
        # To trigger this, we call the one_click_unsubscribe_link function below.
        one_click_unsubscribe_link(othello, "digest")

        # Clear the LRU cache on the stream topics
        get_recent_topics.cache_clear()
        with self.assert_database_query_count(10):
            bulk_handle_digest_email([othello.id], cutoff)

        self.assertEqual(mock_send_future_email.call_count, 1)
        kwargs = mock_send_future_email.call_args[1]
        self.assertEqual(kwargs["to_user_ids"], [othello.id])

        hot_convo = kwargs["context"]["hot_conversations"][0]

        expected_participants = {self.example_user(sender).full_name for sender in senders}

        self.assertEqual(set(hot_convo["participants"]), expected_participants)
        self.assertEqual(hot_convo["count"], 5 - 2)  # 5 messages, but 2 shown
        teaser_messages = hot_convo["first_few_messages"][0]["senders"]
        self.assertIn("some content", teaser_messages[0]["content"][0]["plain"])
        self.assertIn(teaser_messages[0]["sender"], expected_participants)

        # If we run another batch, we reuse the cached topic queries for the
        # 3 streams shared with the batch above; only the one new stream
        # needs a fresh query.
        iago = self.example_user("iago")
        with self.assert_database_query_count(10):
            bulk_handle_digest_email([iago.id], cutoff)
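        # Three cache hits (one per stream shared with the previous batch) and
        # six cached entries overall, assuming get_recent_topics is memoized
        # per stream and cutoff.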
        self.assertEqual(get_recent_topics.cache_info().hits, 3)
        self.assertEqual(get_recent_topics.cache_info().currsize, 6)

        # Two users in the same batch, with only one new stream from
        # the above.
        cordelia = self.example_user("cordelia")
        prospero = self.example_user("prospero")
        with self.assert_database_query_count(9):
            bulk_handle_digest_email([cordelia.id, prospero.id], cutoff)
        self.assertEqual(get_recent_topics.cache_info().hits, 7)
        self.assertEqual(get_recent_topics.cache_info().currsize, 7)

        # If we use a different cutoff, it clears the cache.
        with self.assert_database_query_count(12):
            bulk_handle_digest_email([cordelia.id, prospero.id], cutoff + 1)
        self.assertEqual(get_recent_topics.cache_info().hits, 1)
        self.assertEqual(get_recent_topics.cache_info().currsize, 4)

    def test_bulk_handle_digest_email_skips_deactivated_users(self) -> None:
        """
        A user ID may be added to the queue before the user is deactivated. In such a
        case, the function responsible for sending the email should correctly skip
        the deactivated user.
        """
        realm = get_realm("zulip")
        hamlet = self.example_user("hamlet")
        user_ids = list(
            UserProfile.objects.filter(is_bot=False, realm=realm).values_list("id", flat=True)
        )

        do_deactivate_user(hamlet, acting_user=None)

        with (
            mock.patch("zerver.lib.digest.enough_traffic", return_value=True),
            mock.patch("zerver.lib.digest.send_future_email") as mock_send_email,
        ):
            bulk_handle_digest_email(user_ids, 1)

        emailed_user_ids = [
            call_args[1]["to_user_ids"][0] for call_args in mock_send_email.call_args_list
        ]

        self.assertEqual(
            set(emailed_user_ids), {user_id for user_id in user_ids if user_id != hamlet.id}
        )

    @mock.patch("zerver.lib.digest.send_future_email")
    def test_enough_traffic(self, mock_send_future_email: mock.MagicMock) -> None:
        othello = self.example_user("othello")
        self.subscribe(othello, "Verona")

        in_the_future = timezone_now().timestamp() + 60
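        # With a cutoff in the future, no messages can qualify for the digest,
        # so enough_traffic() fails and no email should be sent.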

        bulk_handle_digest_email([othello.id], in_the_future)
        mock_send_future_email.assert_not_called()

        with mock.patch(
            "zerver.lib.digest.enough_traffic", return_value=True
        ) as enough_traffic_mock:
            bulk_handle_digest_email([othello.id], in_the_future)
            mock_send_future_email.assert_called()
            enough_traffic_mock.assert_called_once_with([], 0)

    @mock.patch("zerver.lib.digest.enough_traffic")
    @mock.patch("zerver.lib.digest.send_future_email")
    def test_guest_user_multiple_stream_sender(
        self, mock_send_future_email: mock.MagicMock, mock_enough_traffic: mock.MagicMock
    ) -> None:
        othello = self.example_user("othello")
        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        polonius = self.example_user("polonius")
        create_stream_if_needed(cordelia.realm, "web_public_stream", is_web_public=True)
        self.subscribe(othello, "web_public_stream")
        self.subscribe(hamlet, "web_public_stream")
        self.subscribe(cordelia, "web_public_stream")
        self.subscribe(polonius, "web_public_stream")

        one_day_ago = timezone_now() - timedelta(days=1)
        Message.objects.all().update(date_sent=one_day_ago)
        one_hour_ago = timezone_now() - timedelta(seconds=3600)

        cutoff = time.mktime(one_hour_ago.timetuple())

        senders = ["hamlet", "cordelia", "othello", "desdemona"]
        self.simulate_stream_conversation("web_public_stream", senders)

        # Remove RealmAuditLog rows, so we don't exclude polonius.
        RealmAuditLog.objects.all().delete()

        # When this test is run in isolation, one additional query is run which
        # is equivalent to
        # ContentType.objects.get(app_label='zerver', model='userprofile')
        # This code is run when we call `confirmation.models.create_confirmation_link`.
        # To trigger this, we call the one_click_unsubscribe_link function below.
        one_click_unsubscribe_link(polonius, "digest")
        get_recent_topics.cache_clear()
        with self.assert_database_query_count(9):
            bulk_handle_digest_email([polonius.id], cutoff)

        self.assertEqual(mock_send_future_email.call_count, 1)
        kwargs = mock_send_future_email.call_args[1]
        self.assertEqual(kwargs["to_user_ids"], [polonius.id])

        new_stream_names = kwargs["context"]["new_channels"]["plain"]
        self.assertIn("web_public_stream", new_stream_names)

    def test_no_logging(self) -> None:
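        # bulk_write_realm_audit_logs() writes one RealmAuditLog row per user
        # passed to it, so an empty list should be a no-op.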
        hamlet = self.example_user("hamlet")
        startlen = len(RealmAuditLog.objects.all())
        bulk_write_realm_audit_logs([])
        self.assert_length(RealmAuditLog.objects.all(), startlen)
        bulk_write_realm_audit_logs([hamlet])
        self.assert_length(RealmAuditLog.objects.all(), startlen + 1)

    def test_soft_deactivated_user_multiple_stream_senders(self) -> None:
        one_day_ago = timezone_now() - timedelta(days=1)
        Message.objects.all().update(date_sent=one_day_ago)

        digest_users = [
            self.example_user("othello"),
            self.example_user("aaron"),
            self.example_user("desdemona"),
            self.example_user("polonius"),
        ]
        digest_users.sort(key=lambda user: user.id)

        for digest_user in digest_users:
            for stream in ["Verona", "Scotland", "Denmark"]:
                self.subscribe(digest_user, stream)

        RealmAuditLog.objects.all().delete()

        # Send messages to a stream, then unsubscribe and resubscribe each
        # digest user from/to that stream.
        senders = ["hamlet", "cordelia", "iago", "prospero", "ZOE"]
        self.simulate_stream_conversation("Verona", senders)

        for digest_user in digest_users:
            self.unsubscribe(digest_user, "Verona")
            self.subscribe(digest_user, "Verona")

        # Send messages to other streams
        self.simulate_stream_conversation("Scotland", senders)
        self.simulate_stream_conversation("Denmark", senders)

        one_hour_ago = timezone_now() - timedelta(seconds=3600)
        cutoff = time.mktime(one_hour_ago.timetuple())

        # When this test is run in isolation, one additional query is run which
        # is equivalent to
        # ContentType.objects.get(app_label='zerver', model='userprofile')
        # This code is run when we call `confirmation.models.create_confirmation_link`.
        # To trigger this, we call the one_click_unsubscribe_link function below.
        one_click_unsubscribe_link(digest_users[0], "digest")

        with mock.patch("zerver.lib.digest.send_future_email") as mock_send_future_email:
            digest_user_ids = [user.id for user in digest_users]

            get_recent_topics.cache_clear()
            with self.assert_database_query_count(16), self.assert_memcached_count(0):
                bulk_handle_digest_email(digest_user_ids, cutoff)

        self.assert_length(digest_users, mock_send_future_email.call_count)

        for i, digest_user in enumerate(digest_users):
            kwargs = mock_send_future_email.call_args_list[i][1]
            self.assertEqual(kwargs["to_user_ids"], [digest_user.id])

            hot_conversations = kwargs["context"]["hot_conversations"]
            self.assertEqual(2, len(hot_conversations), [digest_user.id])

            hot_convo = hot_conversations[0]
            expected_participants = {self.example_user(sender).full_name for sender in senders}

            self.assertEqual(set(hot_convo["participants"]), expected_participants)
            self.assertEqual(hot_convo["count"], 5 - 2)  # 5 messages, but 2 shown
            teaser_messages = hot_convo["first_few_messages"][0]["senders"]
            self.assertIn("some content", teaser_messages[0]["content"][0]["plain"])
            self.assertIn(teaser_messages[0]["sender"], expected_participants)

        last_message_id = get_last_message_id()
        for digest_user in digest_users:
            log_rows = RealmAuditLog.objects.filter(
                modified_user_id=digest_user.id,
                event_type=AuditLogEventType.USER_DIGEST_EMAIL_CREATED,
            )
            (log,) = log_rows
            self.assertEqual(log.event_last_message_id, last_message_id)

    def test_streams_recently_modified_for_user(self) -> None:
        othello = self.example_user("othello")
        cordelia = self.example_user("cordelia")

        for stream in ["Verona", "Scotland", "Denmark"]:
            self.subscribe(othello, stream)
            self.subscribe(cordelia, stream)

        realm = othello.realm
        verona = get_stream("Verona", realm)
        scotland = get_stream("Scotland", realm)
        denmark = get_stream("Denmark", realm)
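
        # get_user_stream_map() drops any stream whose subscription was
        # modified after the given cutoff, so a fresh unsubscribe/resubscribe
        # "hides" that stream from the digest until its audit-log rows age out.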
        def user_streams(user: UserProfile) -> set[Stream]:
            data = get_user_stream_map([user.id], one_hour_ago)
            return {Stream.objects.get(id=stream_id) for stream_id in data[user.id]}

        two_hours_ago = timezone_now() - timedelta(hours=2)
        one_hour_ago = timezone_now() - timedelta(hours=1)

        # Delete all RealmAuditLogs to start with a clean slate.
        RealmAuditLog.objects.all().delete()

        # Othello's map is Verona, Scotland, and Denmark
        self.assertEqual(user_streams(othello), {verona, scotland, denmark})

        # Unsubscribe and resubscribe Othello from/to a stream
        self.unsubscribe(othello, "Denmark")
        self.subscribe(othello, "Denmark")

        # This drops Denmark from the list of streams
        self.assertEqual(user_streams(othello), {verona, scotland})

        # Backdate all our logs (so that Denmark will no longer
        # appear like a recently modified stream for Othello).
        RealmAuditLog.objects.all().update(event_time=two_hours_ago)

        # Now Denmark no longer counts as recently modified, so it is back
        # in Othello's map.
        self.assertEqual(user_streams(othello), {denmark, verona, scotland})

        # Unsubscribe and resubscribe from a stream
        self.unsubscribe(othello, "Verona")
        self.subscribe(othello, "Verona")

        # Now Verona, but not Denmark, counts as recently modified, so Verona
        # is the one excluded.
        self.assertEqual(user_streams(othello), {denmark, scotland})

        # Make sure we don't mix up Othello and Cordelia.
        streams = get_user_stream_map([othello.id, cordelia.id], one_hour_ago)
        self.assertEqual(streams[othello.id], {scotland.id, denmark.id})
        self.assertEqual(streams[cordelia.id], {verona.id, scotland.id, denmark.id})

        self.unsubscribe(cordelia, "Denmark")
        self.subscribe(cordelia, "Denmark")

        streams = get_user_stream_map([othello.id, cordelia.id], one_hour_ago)
        self.assertEqual(streams[othello.id], {scotland.id, denmark.id})
        self.assertEqual(streams[cordelia.id], {verona.id, scotland.id})

    def active_human_users(self, realm: Realm) -> list[UserProfile]:
        users = list(
            UserProfile.objects.filter(
                realm=realm,
                is_active=True,
                is_bot=False,
                enable_digest_emails=True,
            )
        )

        assert len(users) >= 5

        return users

    def test_twelve_hour_exemption(self) -> None:
        RealmAuditLog.objects.all().delete()

        realm = get_realm("zulip")

        cutoff = timezone_now() - timedelta(days=5)

        with mock.patch("zerver.lib.digest.queue_digest_user_ids") as queue_mock:
            _enqueue_emails_for_realm(realm, cutoff)

        users = self.active_human_users(realm)

        num_queued_users = len(queue_mock.call_args[0][0])
        self.assert_length(users, num_queued_users)

        # Simulate that we have sent digests for all our users.
        bulk_write_realm_audit_logs(users)

        # Now if we run again, we won't get any users, since they will have
        # recent RealmAuditLog rows.
        with mock.patch("zerver.lib.digest.queue_digest_user_ids") as queue_mock:
            _enqueue_emails_for_realm(realm, cutoff)

        self.assertEqual(queue_mock.call_count, 0)

    @override_settings(SEND_DIGEST_EMAILS=True)
    @override_settings(SYSTEM_ONLY_REALMS=["zulipinternal"])
    def test_enqueue_emails(self) -> None:
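        # Realms listed in SYSTEM_ONLY_REALMS ("zulipinternal" here) should be
        # skipped by enqueue_emails; the first call below checks that.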
        def call_enqueue_emails(realm: Realm) -> int:
            do_set_realm_property(realm, "digest_emails_enabled", True, acting_user=None)
            do_set_realm_property(
                realm, "digest_weekday", timezone_now().weekday(), acting_user=None
            )
            cutoff = timezone_now() - timedelta(days=0)
            with mock.patch("zerver.worker.digest_emails.bulk_handle_digest_email") as queue_mock:
                enqueue_emails(cutoff)
            return 0 if queue_mock.call_args is None else len(queue_mock.call_args[0][0])

        num_queued_users = call_enqueue_emails(get_realm("zulipinternal"))
        self.assertEqual(num_queued_users, 0)
        num_queued_users = call_enqueue_emails(get_realm("zulip"))
        self.assertEqual(num_queued_users, 10)

    @override_settings(SEND_DIGEST_EMAILS=True)
    def test_inactive_users_queued_for_digest(self) -> None:
        UserActivityInterval.objects.all().delete()
        RealmAuditLog.objects.all().delete()
        # Turn on realm digest emails for all realms
        Realm.objects.update(digest_emails_enabled=True)
        cutoff = timezone_now() - timedelta(days=5)

        realm = get_realm("zulip")
        users = self.active_human_users(realm)

        # Check that all users without a UserActivityInterval entry are considered
        # inactive users and get enqueued.
        with mock.patch("zerver.worker.digest_emails.bulk_handle_digest_email") as queue_mock:
            _enqueue_emails_for_realm(realm, cutoff)

        num_queued_users = len(queue_mock.call_args[0][0])
        self.assert_length(users, num_queued_users)

        for user in users:
            last_visit = timezone_now() - timedelta(days=1)
            UserActivityInterval.objects.create(
                start=last_visit,
                end=last_visit,
                user_profile=user,
            )

        # Now we expect no users, due to recent activity.
        with mock.patch("zerver.worker.digest_emails.bulk_handle_digest_email") as queue_mock:
            _enqueue_emails_for_realm(realm, cutoff)

        self.assertEqual(queue_mock.call_count, 0)

        # Now, backdate all our users' activity.
        last_visit = timezone_now() - timedelta(days=7)
        UserActivityInterval.objects.all().update(start=last_visit, end=last_visit)

        with mock.patch("zerver.worker.digest_emails.bulk_handle_digest_email") as queue_mock:
            _enqueue_emails_for_realm(realm, cutoff)

        num_queued_users = len(queue_mock.call_args[0][0])
        self.assert_length(users, num_queued_users)

    def tuesday(self) -> datetime:
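        # January 5, 2016 was a Tuesday; in Python's weekday() numbering
        # (Monday is 0) that is 1, which matches the realm default
        # digest_weekday.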
        return datetime(year=2016, month=1, day=5, tzinfo=timezone.utc)

    @override_settings(SEND_DIGEST_EMAILS=False)
    def test_disabled(self) -> None:
        RealmAuditLog.objects.all().delete()

        tuesday = self.tuesday()
        cutoff = tuesday - timedelta(days=5)

        with (
            time_machine.travel(tuesday, tick=False),
            mock.patch("zerver.lib.digest.queue_digest_user_ids") as queue_mock,
        ):
            enqueue_emails(cutoff)
        queue_mock.assert_not_called()

    @override_settings(SEND_DIGEST_EMAILS=True)
    def test_only_enqueue_on_valid_day(self) -> None:
        RealmAuditLog.objects.all().delete()

        not_tuesday = datetime(year=2016, month=1, day=6, tzinfo=timezone.utc)
        cutoff = not_tuesday - timedelta(days=5)

        with (
            time_machine.travel(not_tuesday, tick=False),
            mock.patch("zerver.lib.digest.queue_digest_user_ids") as queue_mock,
        ):
            enqueue_emails(cutoff)
        queue_mock.assert_not_called()

    @override_settings(SEND_DIGEST_EMAILS=True)
    def test_no_email_digest_for_bots(self) -> None:
        RealmAuditLog.objects.all().delete()

        cutoff = timezone_now() - timedelta(days=5)

        realm = get_realm("zulip")
        realm.digest_emails_enabled = True
        realm.save()

        bot = do_create_user(
            "some_bot@example.com",
            "password",
            realm,
            "some_bot",
            bot_type=UserProfile.DEFAULT_BOT,
            acting_user=None,
        )

        # Check that bots are not sent emails
        with mock.patch("zerver.lib.digest.queue_digest_user_ids") as queue_mock:
            _enqueue_emails_for_realm(realm, cutoff)

        num_queued_users = len(queue_mock.call_args[0][0])
        assert num_queued_users >= 5

        for arg in queue_mock.call_args_list:
            user_ids = arg[0][0]
            for user_id in user_ids:
                self.assertNotEqual(user_id, bot.id)

    @override_settings(SEND_DIGEST_EMAILS=True)
    def test_new_stream_link(self) -> None:
        Stream.objects.all().delete()
        cutoff = timezone_now() - timedelta(days=5)
        cordelia = self.example_user("cordelia")
        stream = create_stream_if_needed(cordelia.realm, "New stream")[0]
        stream.date_created = timezone_now()
        stream.save()

        realm = cordelia.realm

        recently_created_streams = get_recently_created_streams(realm, cutoff)
        stream_count, stream_info = gather_new_streams(
            realm, recently_created_streams, can_access_public=True
        )
        self.assertEqual(stream_count, 1)
        expected_html = f"<a href='http://zulip.testserver/#narrow/channel/{stream.id}-New-stream'>New stream</a>"
        self.assertEqual(stream_info["html"][0], expected_html)

        # Guests don't see our stream...
        stream_count, stream_info = gather_new_streams(
            realm, recently_created_streams, can_access_public=False
        )
        self.assertEqual(stream_count, 0)
        self.assertEqual(stream_info["html"], [])

        # ...but they do if we make it web-public.
        stream.is_web_public = True
        stream.save()

        recently_created_streams = get_recently_created_streams(realm, cutoff)
        stream_count, stream_info = gather_new_streams(
            realm, recently_created_streams, can_access_public=False
        )
        self.assertEqual(stream_count, 1)

        # Make the stream appear to be older.
        stream.date_created = timezone_now() - timedelta(days=7)
        stream.save()

        recently_created_streams = get_recently_created_streams(realm, cutoff)
        stream_count, stream_info = gather_new_streams(
            realm, recently_created_streams, can_access_public=True
        )
        self.assertEqual(stream_count, 0)
        self.assertEqual(stream_info["html"], [])

    def simulate_stream_conversation(self, stream: str, senders: list[str]) -> list[int]:
        message_ids: list[int] = []
        for sender_name in senders:
            sender = self.example_user(sender_name)
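            # The test framework asserts that a stream sender ends up with a
            # UserMessage row, which effectively requires the sender to be
            # subscribed; subscribe explicitly rather than relying on fixtures.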
            self.subscribe(sender, stream)
            content = f"some content for {stream} from {sender_name}"
            message_id = self.send_stream_message(sender, stream, content)
            message_ids.append(message_id)
        return message_ids


class TestDigestContentInBrowser(ZulipTestCase):
    def test_get_digest_content_in_browser(self) -> None:
        self.login("hamlet")
        result = self.client_get("/digest/")
        self.assert_in_success_response(["Click here to log in to Zulip and catch up."], result)


class TestDigestTopics(ZulipTestCase):
    def populate_topic(
        self,
        topic: DigestTopic,
        humans: int,
        human_messages: int,
        bots: int,
        bot_messages: int,
        realm: Realm,
    ) -> None:
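        # Spread the requested number of messages over distinct example users
        # of the matching type (human or bot), cycling until the count is hit.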
        for is_bot, users, messages in [
            (False, humans, human_messages),
            (True, bots, bot_messages),
        ]:
            messages_sent = 0
            while messages_sent < messages:
                for index, username in enumerate(self.example_user_map, start=1):
                    if self.example_user(username).is_bot != is_bot:
                        continue
                    topic.add_message(Message(sender=self.example_user(username), realm=realm))
                    messages_sent += 1
                    if messages_sent == messages:
                        break
                    if index == users:
                        break

    def test_get_hot_topics(self) -> None:
        realm = get_realm("zulip")
        denmark = get_stream("Denmark", realm)
        verona = get_stream("Verona", realm)
        diverse_topic_a = DigestTopic((denmark.id, "5 humans talking"))
        self.populate_topic(
            diverse_topic_a, humans=5, human_messages=10, bots=0, bot_messages=0, realm=realm
        )

        diverse_topic_b = DigestTopic((denmark.id, "4 humans talking"))
        self.populate_topic(
            diverse_topic_b, humans=4, human_messages=15, bots=0, bot_messages=0, realm=realm
        )

        diverse_topic_c = DigestTopic((verona.id, "5 humans talking in another stream"))
        self.populate_topic(
            diverse_topic_c, humans=5, human_messages=15, bots=0, bot_messages=0, realm=realm
        )

        diverse_topic_d = DigestTopic((denmark.id, "3 humans and 2 bots talking"))
        self.populate_topic(
            diverse_topic_d, humans=3, human_messages=15, bots=2, bot_messages=10, realm=realm
        )

        diverse_topic_e = DigestTopic((denmark.id, "3 humans talking"))
        self.populate_topic(
            diverse_topic_e, humans=3, human_messages=20, bots=0, bot_messages=0, realm=realm
        )

        lengthy_topic_a = DigestTopic((denmark.id, "2 humans talking a lot"))
        self.populate_topic(
            lengthy_topic_a, humans=2, human_messages=40, bots=0, bot_messages=0, realm=realm
        )

        lengthy_topic_b = DigestTopic((denmark.id, "2 humans talking"))
        self.populate_topic(
            lengthy_topic_b, humans=2, human_messages=30, bots=0, bot_messages=0, realm=realm
        )

        lengthy_topic_c = DigestTopic((denmark.id, "a human and bot talking"))
        self.populate_topic(
            lengthy_topic_c, humans=1, human_messages=20, bots=1, bot_messages=20, realm=realm
        )

        lengthy_topic_d = DigestTopic((verona.id, "2 humans talking in another stream"))
        self.populate_topic(
            lengthy_topic_d, humans=2, human_messages=35, bots=0, bot_messages=0, realm=realm
        )

        topics = [
            diverse_topic_a,
            diverse_topic_b,
            diverse_topic_c,
            diverse_topic_d,
            diverse_topic_e,
            lengthy_topic_a,
            lengthy_topic_b,
            lengthy_topic_c,
            lengthy_topic_d,
        ]
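        # get_hot_topics() appears to pick at most four topics from the given
        # stream ids: the two with the most distinct human senders, then the
        # two with the most human messages. The stray 0 in the first call
        # below acts as a nonexistent stream id that should simply be ignored.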
        self.assertEqual(
            get_hot_topics(topics, {denmark.id, 0}),
            [diverse_topic_a, diverse_topic_b, lengthy_topic_a, lengthy_topic_b],
        )
        self.assertEqual(
            get_hot_topics(topics, {denmark.id, verona.id}),
            [diverse_topic_a, diverse_topic_c, lengthy_topic_a, lengthy_topic_d],
        )
        self.assertEqual(get_hot_topics(topics, {verona.id}), [diverse_topic_c, lengthy_topic_d])
        self.assertEqual(get_hot_topics(topics, set()), [])