2018-03-09 00:27:01 +01:00
|
|
|
import datetime
|
|
|
|
import time
|
2019-03-03 07:14:58 +01:00
|
|
|
from typing import List
|
2020-06-11 00:54:34 +02:00
|
|
|
from unittest import mock
|
2018-03-09 00:27:01 +01:00
|
|
|
|
2018-03-09 00:16:44 +01:00
|
|
|
from django.test import override_settings
|
2018-03-09 00:27:01 +01:00
|
|
|
from django.utils.timezone import now as timezone_now
|
|
|
|
|
2019-03-10 05:45:47 +01:00
|
|
|
from confirmation.models import one_click_unsubscribe_link
|
2020-03-24 14:47:41 +01:00
|
|
|
from zerver.lib.actions import do_create_user
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.digest import (
|
2020-11-12 13:01:07 +01:00
|
|
|
_enqueue_emails_for_realm,
|
2020-11-04 23:40:29 +01:00
|
|
|
bulk_handle_digest_email,
|
2020-11-12 13:01:07 +01:00
|
|
|
bulk_write_realm_audit_logs,
|
2020-06-11 00:54:34 +02:00
|
|
|
enqueue_emails,
|
|
|
|
gather_new_streams,
|
2020-11-13 12:27:39 +01:00
|
|
|
get_modified_streams,
|
2020-11-13 17:25:52 +01:00
|
|
|
get_recent_streams,
|
2020-06-11 00:54:34 +02:00
|
|
|
)
|
2020-11-12 12:11:35 +01:00
|
|
|
from zerver.lib.message import get_last_message_id
|
2020-03-24 14:47:41 +01:00
|
|
|
from zerver.lib.streams import create_stream_if_needed
|
2018-03-09 00:27:01 +01:00
|
|
|
from zerver.lib.test_classes import ZulipTestCase
|
2020-11-04 23:51:17 +01:00
|
|
|
from zerver.lib.test_helpers import cache_tries_captured, queries_captured
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.models import (
|
|
|
|
Message,
|
|
|
|
Realm,
|
|
|
|
RealmAuditLog,
|
2020-11-12 15:17:33 +01:00
|
|
|
Stream,
|
2020-11-12 16:55:32 +01:00
|
|
|
UserActivityInterval,
|
2020-06-11 00:54:34 +02:00
|
|
|
UserProfile,
|
|
|
|
flush_per_request_caches,
|
|
|
|
get_client,
|
|
|
|
get_realm,
|
|
|
|
get_stream,
|
|
|
|
)
|
|
|
|
|
2018-03-09 00:27:01 +01:00
|
|
|
|
|
|
|
class TestDigestEmailMessages(ZulipTestCase):
|
2018-03-09 00:16:44 +01:00
|
|
|
|
2018-11-11 16:51:59 +01:00
|
|
|
    @mock.patch('zerver.lib.digest.enough_traffic')
    @mock.patch('zerver.lib.digest.send_future_email')
    def test_multiple_stream_senders(self,
                                     mock_send_future_email: mock.MagicMock,
                                     mock_enough_traffic: mock.MagicMock) -> None:
        """A stream conversation with several senders yields one digest email
        whose hot conversation lists every sender as a participant."""
        othello = self.example_user('othello')
        self.subscribe(othello, 'Verona')

        # Backdate all pre-existing messages, then pick a cutoff one hour in
        # the past so the conversation simulated below is the only traffic
        # newer than the cutoff.
        one_day_ago = timezone_now() - datetime.timedelta(days=1)
        Message.objects.all().update(date_sent=one_day_ago)
        one_hour_ago = timezone_now() - datetime.timedelta(seconds=3600)
        cutoff = time.mktime(one_hour_ago.timetuple())

        senders = ['hamlet', 'cordelia', 'iago', 'prospero', 'ZOE']
        self.simulate_stream_conversation('Verona', senders)

        # Remove RealmAuditLog rows, presumably so no user is excluded from
        # the digest due to recent audit-log activity.  (NOTE(review): the
        # original comment mentioned polonius, which looks copy/pasted from
        # the guest-user test below — this test digests othello.)
        RealmAuditLog.objects.all().delete()

        flush_per_request_caches()
        # When this test is run in isolation, one additional query is run which
        # is equivalent to
        # ContentType.objects.get(app_label='zerver', model='userprofile')
        # This code is run when we call `confirmation.models.create_confirmation_link`.
        # To trigger this, we call the one_click_unsubscribe_link function below.
        one_click_unsubscribe_link(othello, 'digest')
        with queries_captured() as queries:
            bulk_handle_digest_email([othello.id], cutoff)

        # Pin the query count for building a single user's digest.
        self.assert_length(queries, 9)

        self.assertEqual(mock_send_future_email.call_count, 1)
        kwargs = mock_send_future_email.call_args[1]
        self.assertEqual(kwargs['to_user_ids'], [othello.id])

        hot_convo = kwargs['context']['hot_conversations'][0]

        expected_participants = {
            self.example_user(sender).full_name
            for sender in senders
        }

        self.assertEqual(set(hot_convo['participants']), expected_participants)
        self.assertEqual(hot_convo['count'], 5 - 2)  # 5 messages, but 2 shown
        teaser_messages = hot_convo['first_few_messages'][0]['senders']
        self.assertIn('some content', teaser_messages[0]['content'][0]['plain'])
        self.assertIn(teaser_messages[0]['sender'], expected_participants)
|
|
|
|
|
2020-07-24 19:41:27 +02:00
|
|
|
    @mock.patch('zerver.lib.digest.enough_traffic')
    @mock.patch('zerver.lib.digest.send_future_email')
    def test_guest_user_multiple_stream_sender(self,
                                               mock_send_future_email: mock.MagicMock,
                                               mock_enough_traffic: mock.MagicMock) -> None:
        """A guest user (polonius) gets a digest for a web-public stream,
        including it under the digest's "new streams" section."""
        othello = self.example_user('othello')
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        polonius = self.example_user('polonius')
        create_stream_if_needed(cordelia.realm, 'web_public_stream', is_web_public=True)
        self.subscribe(othello, 'web_public_stream')
        self.subscribe(hamlet, 'web_public_stream')
        self.subscribe(cordelia, 'web_public_stream')
        self.subscribe(polonius, 'web_public_stream')

        # Backdate all pre-existing messages, then pick a cutoff one hour in
        # the past so only the simulated conversation below counts as recent.
        one_day_ago = timezone_now() - datetime.timedelta(days=1)
        Message.objects.all().update(date_sent=one_day_ago)
        one_hour_ago = timezone_now() - datetime.timedelta(seconds=3600)

        cutoff = time.mktime(one_hour_ago.timetuple())

        senders = ['hamlet', 'cordelia', 'othello', 'desdemona']
        self.simulate_stream_conversation('web_public_stream', senders)

        # Remove RealmAuditLog rows, so we don't exclude polonius.
        RealmAuditLog.objects.all().delete()

        flush_per_request_caches()
        # When this test is run in isolation, one additional query is run which
        # is equivalent to
        # ContentType.objects.get(app_label='zerver', model='userprofile')
        # This code is run when we call `confirmation.models.create_confirmation_link`.
        # To trigger this, we call the one_click_unsubscribe_link function below.
        one_click_unsubscribe_link(polonius, 'digest')

        with queries_captured() as queries:
            bulk_handle_digest_email([polonius.id], cutoff)

        # Pin the query count for building a single user's digest.
        self.assert_length(queries, 9)

        self.assertEqual(mock_send_future_email.call_count, 1)
        kwargs = mock_send_future_email.call_args[1]
        self.assertEqual(kwargs['to_user_ids'], [polonius.id])

        # The web-public stream must appear in the guest's new-streams list.
        new_stream_names = kwargs['context']['new_streams']['plain']
        self.assertTrue('web_public_stream' in new_stream_names)
|
|
|
|
|
2020-11-04 19:40:42 +01:00
|
|
|
    def test_soft_deactivated_user_multiple_stream_senders(self) -> None:
        """Bulk-digest several users at once: each gets their own email with
        the same hot conversations, query/cache counts stay bounded, and a
        USER_DIGEST_EMAIL_CREATED audit-log row is written per user.

        NOTE(review): despite the name, nothing here visibly soft-deactivates
        a user; presumably bulk_handle_digest_email handles that path
        internally — confirm against zerver.lib.digest.
        """
        # Backdate all pre-existing messages so only the simulated
        # conversations below count as recent.
        one_day_ago = timezone_now() - datetime.timedelta(days=1)
        Message.objects.all().update(date_sent=one_day_ago)

        digest_users = [
            self.example_user('othello'),
            self.example_user('aaron'),
            self.example_user('desdemona'),
            self.example_user('polonius'),
        ]
        # Sort by id so the per-user assertions below line up with the
        # order of mock_send_future_email.call_args_list.
        digest_users.sort(key=lambda user: user.id)

        for digest_user in digest_users:
            for stream in ['Verona', 'Scotland', 'Denmark']:
                self.subscribe(digest_user, stream)

        RealmAuditLog.objects.all().delete()

        # Send messages to a stream and unsubscribe - subscribe from that stream
        senders = ['hamlet', 'cordelia', 'iago', 'prospero', 'ZOE']
        self.simulate_stream_conversation('Verona', senders)

        for digest_user in digest_users:
            self.unsubscribe(digest_user, 'Verona')
            self.subscribe(digest_user, 'Verona')

        # Send messages to other streams
        self.simulate_stream_conversation('Scotland', senders)
        self.simulate_stream_conversation('Denmark', senders)

        one_hour_ago = timezone_now() - datetime.timedelta(seconds=3600)
        cutoff = time.mktime(one_hour_ago.timetuple())

        flush_per_request_caches()

        # When this test is run in isolation, one additional query is run which
        # is equivalent to
        # ContentType.objects.get(app_label='zerver', model='userprofile')
        # This code is run when we call `confirmation.models.create_confirmation_link`.
        # To trigger this, we call the one_click_unsubscribe_link function below.
        one_click_unsubscribe_link(digest_users[0], 'digest')

        with mock.patch('zerver.lib.digest.send_future_email') as mock_send_future_email:
            digest_user_ids = [user.id for user in digest_users]

            with queries_captured() as queries:
                with cache_tries_captured() as cache_tries:
                    bulk_handle_digest_email(digest_user_ids, cutoff)

            # Pin the query count for the whole bulk digest, and require
            # that no per-request cache lookups happen at all.
            self.assert_length(queries, 12)
            self.assert_length(cache_tries, 0)

            self.assertEqual(mock_send_future_email.call_count, len(digest_users))

            for i, digest_user in enumerate(digest_users):
                kwargs = mock_send_future_email.call_args_list[i][1]
                self.assertEqual(kwargs['to_user_ids'], [digest_user.id])

                hot_conversations = kwargs['context']['hot_conversations']
                self.assertEqual(2, len(hot_conversations), [digest_user.id])

                hot_convo = hot_conversations[0]
                expected_participants = {
                    self.example_user(sender).full_name
                    for sender in senders
                }

                self.assertEqual(set(hot_convo['participants']), expected_participants)
                self.assertEqual(hot_convo['count'], 5 - 2)  # 5 messages, but 2 shown
                teaser_messages = hot_convo['first_few_messages'][0]['senders']
                self.assertIn('some content', teaser_messages[0]['content'][0]['plain'])
                self.assertIn(teaser_messages[0]['sender'], expected_participants)

        # Each digested user should have exactly one audit-log row recording
        # the digest, stamped with the latest message id.
        last_message_id = get_last_message_id()
        for digest_user in digest_users:
            log_rows = RealmAuditLog.objects.filter(
                modified_user_id=digest_user.id,
                event_type=RealmAuditLog.USER_DIGEST_EMAIL_CREATED,
            )
            (log,) = log_rows
            self.assertEqual(log.event_last_message_id, last_message_id)
|
|
|
|
|
2020-11-05 14:55:45 +01:00
|
|
|
    def test_streams_recently_modified_for_user(self) -> None:
        """get_modified_streams reports, per user, the streams with
        subscription churn (unsubscribe+resubscribe) after the cutoff,
        based on RealmAuditLog event times, and keeps users separate."""
        othello = self.example_user('othello')
        cordelia = self.example_user('cordelia')

        for stream in ['Verona', 'Scotland', 'Denmark']:
            self.subscribe(othello, stream)
            self.subscribe(cordelia, stream)

        realm = othello.realm
        denmark = get_stream('Denmark', realm)
        verona = get_stream('Verona', realm)

        two_hours_ago = timezone_now() - datetime.timedelta(hours=2)
        one_hour_ago = timezone_now() - datetime.timedelta(hours=1)

        # Delete all RealmAuditLogs to start with a clean slate.
        RealmAuditLog.objects.all().delete()

        # Unsubscribe and subscribe Othello from a stream
        self.unsubscribe(othello, 'Denmark')
        self.subscribe(othello, 'Denmark')

        recent_streams = get_modified_streams([othello.id], one_hour_ago)
        self.assertEqual(recent_streams[othello.id], {denmark.id})

        # Backdate all our logs (so that Denmark will no longer
        # appear like a recently modified stream for Othello).
        RealmAuditLog.objects.all().update(event_time=two_hours_ago)

        # Now Denmark no longer appears recent to Othello.
        recent_streams = get_modified_streams([othello.id], one_hour_ago)
        self.assertEqual(recent_streams[othello.id], set())

        # Unsubscribe and subscribe from a stream
        self.unsubscribe(othello, 'Verona')
        self.subscribe(othello, 'Verona')

        # Now, Verona, but not Denmark, appears recent.
        recent_streams = get_modified_streams([othello.id], one_hour_ago)
        self.assertEqual(recent_streams[othello.id], {verona.id})

        # make sure we don't mix up Othello and Cordelia
        self.unsubscribe(cordelia, 'Denmark')
        self.subscribe(cordelia, 'Denmark')

        recent_streams = get_modified_streams([othello.id, cordelia.id], one_hour_ago)
        self.assertEqual(recent_streams[cordelia.id], {denmark.id})
|
2019-03-03 07:14:58 +01:00
|
|
|
|
2020-11-12 13:01:07 +01:00
|
|
|
def active_human_users(self, realm: Realm) -> List[UserProfile]:
|
|
|
|
users = list(UserProfile.objects.filter(
|
|
|
|
realm=realm,
|
|
|
|
is_active=True,
|
|
|
|
is_bot=False,
|
|
|
|
enable_digest_emails=True,
|
|
|
|
))
|
|
|
|
|
|
|
|
assert len(users) >= 5
|
|
|
|
|
|
|
|
return users
|
|
|
|
|
|
|
|
    def test_twelve_hour_exemption(self) -> None:
        """Users who already have a digest audit-log row are not enqueued
        again by _enqueue_emails_for_realm."""
        RealmAuditLog.objects.all().delete()

        realm = get_realm('zulip')

        cutoff = timezone_now() - datetime.timedelta(days=5)

        with mock.patch('zerver.lib.digest.queue_digest_user_ids') as queue_mock:
            _enqueue_emails_for_realm(realm, cutoff)

        users = self.active_human_users(realm)

        # First pass: all active human users get queued (in one batch).
        num_queued_users = len(queue_mock.call_args[0][0])
        self.assertEqual(num_queued_users, len(users))

        # Simulate that we have sent digests for all our users.
        bulk_write_realm_audit_logs(users)

        # Now if we run again, we won't get any users, since they will have
        # recent RealmAuditLog rows.
        with mock.patch('zerver.lib.digest.queue_digest_user_ids') as queue_mock:
            _enqueue_emails_for_realm(realm, cutoff)

        self.assertEqual(queue_mock.call_count, 0)
|
|
|
|
|
2018-03-09 00:16:44 +01:00
|
|
|
    @override_settings(SEND_DIGEST_EMAILS=True)
    def test_inactive_users_queued_for_digest(self) -> None:
        """Only users without recent UserActivityInterval entries are
        enqueued for digest emails."""
        UserActivityInterval.objects.all().delete()
        RealmAuditLog.objects.all().delete()
        # Turn on realm digest emails for all realms
        Realm.objects.update(digest_emails_enabled=True)
        cutoff = timezone_now() - datetime.timedelta(days=5)

        realm = get_realm("zulip")
        users = self.active_human_users(realm)

        # Check that all users without a UserActivityInterval entry are
        # considered inactive users and get enqueued.
        with mock.patch('zerver.lib.digest.queue_digest_user_ids') as queue_mock:
            _enqueue_emails_for_realm(realm, cutoff)

        num_queued_users = len(queue_mock.call_args[0][0])
        self.assertEqual(num_queued_users, len(users))

        # Give every user activity from yesterday (newer than the cutoff).
        for user in users:
            last_visit = timezone_now() - datetime.timedelta(days=1)
            UserActivityInterval.objects.create(
                start=last_visit,
                end=last_visit,
                user_profile=user,
            )

        # Now we expect no users, due to recent activity.
        with mock.patch('zerver.lib.digest.queue_digest_user_ids') as queue_mock:
            _enqueue_emails_for_realm(realm, cutoff)

        self.assertEqual(queue_mock.call_count, 0)

        # Now, backdate all our users activity.
        last_visit = timezone_now() - datetime.timedelta(days=7)
        UserActivityInterval.objects.all().update(start=last_visit, end=last_visit)

        with mock.patch('zerver.lib.digest.queue_digest_user_ids') as queue_mock:
            _enqueue_emails_for_realm(realm, cutoff)

        # With activity older than the cutoff, everyone is enqueued again.
        num_queued_users = len(queue_mock.call_args[0][0])
        self.assertEqual(num_queued_users, len(users))
|
2018-03-09 00:27:01 +01:00
|
|
|
|
2020-11-12 14:42:46 +01:00
|
|
|
def tuesday(self) -> datetime.datetime:
|
|
|
|
return datetime.datetime(year=2016, month=1, day=5, tzinfo=datetime.timezone.utc)
|
|
|
|
|
|
|
|
@override_settings(SEND_DIGEST_EMAILS=False)
|
|
|
|
def test_disabled(self) -> None:
|
2020-11-12 13:01:07 +01:00
|
|
|
RealmAuditLog.objects.all().delete()
|
2018-03-09 00:16:44 +01:00
|
|
|
|
2020-11-12 14:42:46 +01:00
|
|
|
tuesday = self.tuesday()
|
|
|
|
cutoff = tuesday - datetime.timedelta(days=5)
|
|
|
|
|
|
|
|
with mock.patch("zerver.lib.digest.timezone_now", return_value=tuesday):
|
2020-11-13 18:13:13 +01:00
|
|
|
with mock.patch("zerver.lib.digest.queue_digest_user_ids") as queue_mock:
|
2020-11-12 14:42:46 +01:00
|
|
|
enqueue_emails(cutoff)
|
|
|
|
queue_mock.assert_not_called()
|
|
|
|
|
2018-03-09 00:16:44 +01:00
|
|
|
@override_settings(SEND_DIGEST_EMAILS=True)
|
2020-11-12 14:42:46 +01:00
|
|
|
def test_only_enqueue_on_valid_day(self) -> None:
|
2020-11-12 13:01:07 +01:00
|
|
|
RealmAuditLog.objects.all().delete()
|
2018-03-09 00:27:01 +01:00
|
|
|
|
2020-11-12 14:42:46 +01:00
|
|
|
not_tuesday = datetime.datetime(year=2016, month=1, day=6, tzinfo=datetime.timezone.utc)
|
|
|
|
cutoff = not_tuesday - datetime.timedelta(days=5)
|
|
|
|
|
|
|
|
with mock.patch("zerver.lib.digest.timezone_now", return_value=not_tuesday):
|
2020-11-13 18:13:13 +01:00
|
|
|
with mock.patch("zerver.lib.digest.queue_digest_user_ids") as queue_mock:
|
2020-11-12 14:42:46 +01:00
|
|
|
enqueue_emails(cutoff)
|
|
|
|
queue_mock.assert_not_called()
|
2018-03-09 00:27:01 +01:00
|
|
|
|
2018-03-09 00:16:44 +01:00
|
|
|
    @override_settings(SEND_DIGEST_EMAILS=True)
    def test_no_email_digest_for_bots(self) -> None:
        """Bot users are never enqueued for digest emails."""
        RealmAuditLog.objects.all().delete()

        cutoff = timezone_now() - datetime.timedelta(days=5)

        realm = get_realm('zulip')
        realm.digest_emails_enabled = True
        realm.save()

        bot = do_create_user(
            'some_bot@example.com',
            'password',
            realm,
            'some_bot',
            bot_type=UserProfile.DEFAULT_BOT,
        )

        # Check that bots are not sent emails
        with mock.patch('zerver.lib.digest.queue_digest_user_ids') as queue_mock:
            _enqueue_emails_for_realm(realm, cutoff)

        # Plenty of human users should still be queued...
        num_queued_users = len(queue_mock.call_args[0][0])
        assert num_queued_users >= 5

        # ...but the bot must not appear in any queued batch.
        for arg in queue_mock.call_args_list:
            user_ids = arg[0][0]
            for user_id in user_ids:
                self.assertNotEqual(user_id, bot.id)
|
2018-03-09 00:27:01 +01:00
|
|
|
|
2018-03-09 00:16:44 +01:00
|
|
|
    @override_settings(SEND_DIGEST_EMAILS=True)
    def test_new_stream_link(self) -> None:
        """gather_new_streams reports recently created streams with a narrow
        link, hides non-public streams from guests, shows web-public streams
        to everyone, and ignores streams older than the cutoff."""
        Stream.objects.all().delete()
        cutoff = timezone_now() - datetime.timedelta(days=5)
        cordelia = self.example_user('cordelia')
        stream = create_stream_if_needed(cordelia.realm, 'New stream')[0]
        stream.date_created = timezone_now()
        stream.save()

        realm = cordelia.realm

        recent_streams = get_recent_streams(realm, cutoff)
        stream_count, stream_info = gather_new_streams(realm, recent_streams, can_access_public=True)
        self.assertEqual(stream_count, 1)
        expected_html = f"<a href='http://zulip.testserver/#narrow/stream/{stream.id}-New-stream'>New stream</a>"
        self.assertEqual(stream_info['html'][0], expected_html)

        # guests don't see our stream
        stream_count, stream_info = gather_new_streams(realm, recent_streams, can_access_public=False)
        self.assertEqual(stream_count, 0)
        self.assertEqual(stream_info['html'], [])

        # but they do if we make it web public
        stream.is_web_public = True
        stream.save()

        recent_streams = get_recent_streams(realm, cutoff)
        stream_count, stream_info = gather_new_streams(realm, recent_streams, can_access_public=True)
        self.assertEqual(stream_count, 1)

        # Make the stream appear to be older.
        stream.date_created = timezone_now() - datetime.timedelta(days=7)
        stream.save()

        recent_streams = get_recent_streams(realm, cutoff)
        stream_count, stream_info = gather_new_streams(realm, recent_streams, can_access_public=True)
        self.assertEqual(stream_count, 0)
        self.assertEqual(stream_info['html'], [])
|
2018-06-02 15:45:27 +02:00
|
|
|
|
2019-03-03 07:14:58 +01:00
|
|
|
    def simulate_stream_conversation(self, stream: str, senders: List[str]) -> List[int]:
        """Send one message to `stream` from each name in `senders`.

        The messages are re-attributed to the 'website' client so digest
        code treats them as human traffic.  Returns the new message ids
        in sending order.
        """
        client = 'website'  # this makes `sent_by_human` return True
        sending_client = get_client(client)
        message_ids: List[int] = []
        for sender_name in senders:
            sender = self.example_user(sender_name)
            content = f'some content for {stream} from {sender_name}'
            message_id = self.send_stream_message(sender, stream, content)
            message_ids.append(message_id)
        Message.objects.filter(id__in=message_ids).update(sending_client=sending_client)
        return message_ids
|
|
|
|
|
2018-08-12 22:09:34 +02:00
|
|
|
class TestDigestContentInBrowser(ZulipTestCase):
    """Tests for the in-browser digest preview endpoint."""

    def test_get_digest_content_in_browser(self) -> None:
        """A logged-in user fetching /digest/ sees the catch-up text."""
        self.login('hamlet')
        response = self.client_get("/digest/")
        self.assert_in_success_response(["Click here to log in to Zulip and catch up."], response)
|