2018-03-09 00:27:01 +01:00
|
|
|
import datetime
|
|
|
|
import time
|
2019-03-03 07:14:58 +01:00
|
|
|
from typing import List
|
2020-06-11 00:54:34 +02:00
|
|
|
from unittest import mock
|
2018-03-09 00:27:01 +01:00
|
|
|
|
2018-03-09 00:16:44 +01:00
|
|
|
from django.test import override_settings
|
2018-03-09 00:27:01 +01:00
|
|
|
from django.utils.timezone import now as timezone_now
|
|
|
|
|
2019-03-10 05:45:47 +01:00
|
|
|
from confirmation.models import one_click_unsubscribe_link
|
2020-03-24 14:47:41 +01:00
|
|
|
from zerver.lib.actions import do_create_user
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.digest import (
|
|
|
|
enqueue_emails,
|
|
|
|
exclude_subscription_modified_streams,
|
|
|
|
gather_new_streams,
|
|
|
|
handle_digest_email,
|
|
|
|
)
|
2020-03-24 14:47:41 +01:00
|
|
|
from zerver.lib.streams import create_stream_if_needed
|
2018-03-09 00:27:01 +01:00
|
|
|
from zerver.lib.test_classes import ZulipTestCase
|
2018-11-11 16:51:59 +01:00
|
|
|
from zerver.lib.test_helpers import queries_captured
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.models import (
|
|
|
|
Message,
|
|
|
|
Realm,
|
|
|
|
RealmAuditLog,
|
|
|
|
UserActivity,
|
|
|
|
UserProfile,
|
|
|
|
flush_per_request_caches,
|
|
|
|
get_client,
|
|
|
|
get_realm,
|
|
|
|
get_stream,
|
|
|
|
)
|
|
|
|
|
2018-03-09 00:27:01 +01:00
|
|
|
|
|
|
|
class TestDigestEmailMessages(ZulipTestCase):
|
2018-03-09 00:16:44 +01:00
|
|
|
|
2018-11-11 16:51:59 +01:00
|
|
|
@mock.patch('zerver.lib.digest.enough_traffic')
|
|
|
|
@mock.patch('zerver.lib.digest.send_future_email')
|
|
|
|
def test_multiple_stream_senders(self,
|
|
|
|
mock_send_future_email: mock.MagicMock,
|
|
|
|
mock_enough_traffic: mock.MagicMock) -> None:
|
|
|
|
|
|
|
|
othello = self.example_user('othello')
|
|
|
|
self.subscribe(othello, 'Verona')
|
|
|
|
|
|
|
|
one_day_ago = timezone_now() - datetime.timedelta(days=1)
|
2019-08-28 02:43:19 +02:00
|
|
|
Message.objects.all().update(date_sent=one_day_ago)
|
2019-05-09 18:35:05 +02:00
|
|
|
one_hour_ago = timezone_now() - datetime.timedelta(seconds=3600)
|
2018-11-11 16:51:59 +01:00
|
|
|
|
2019-05-09 18:35:05 +02:00
|
|
|
cutoff = time.mktime(one_hour_ago.timetuple())
|
2018-11-11 16:51:59 +01:00
|
|
|
|
|
|
|
senders = ['hamlet', 'cordelia', 'iago', 'prospero', 'ZOE']
|
2019-03-03 07:14:58 +01:00
|
|
|
self.simulate_stream_conversation('Verona', senders)
|
2018-11-11 16:51:59 +01:00
|
|
|
|
2018-11-11 18:29:58 +01:00
|
|
|
flush_per_request_caches()
|
2019-03-10 05:45:47 +01:00
|
|
|
# When this test is run in isolation, one additional query is run which
|
|
|
|
# is equivalent to
|
|
|
|
# ContentType.objects.get(app_label='zerver', model='userprofile')
|
|
|
|
# This code is run when we call `confirmation.models.create_confirmation_link`.
|
|
|
|
# To trigger this, we call the one_click_unsubscribe_link function below.
|
|
|
|
one_click_unsubscribe_link(othello, 'digest')
|
2018-11-11 16:51:59 +01:00
|
|
|
with queries_captured() as queries:
|
|
|
|
handle_digest_email(othello.id, cutoff)
|
|
|
|
|
2019-03-10 05:45:47 +01:00
|
|
|
self.assert_length(queries, 6)
|
2018-11-11 16:51:59 +01:00
|
|
|
|
|
|
|
self.assertEqual(mock_send_future_email.call_count, 1)
|
|
|
|
kwargs = mock_send_future_email.call_args[1]
|
2018-12-03 23:26:51 +01:00
|
|
|
self.assertEqual(kwargs['to_user_ids'], [othello.id])
|
2018-11-11 16:51:59 +01:00
|
|
|
|
|
|
|
hot_convo = kwargs['context']['hot_conversations'][0]
|
|
|
|
|
|
|
|
expected_participants = {
|
|
|
|
self.example_user(sender).full_name
|
|
|
|
for sender in senders
|
|
|
|
}
|
|
|
|
|
|
|
|
self.assertEqual(set(hot_convo['participants']), expected_participants)
|
|
|
|
self.assertEqual(hot_convo['count'], 5 - 2) # 5 messages, but 2 shown
|
|
|
|
teaser_messages = hot_convo['first_few_messages'][0]['senders']
|
|
|
|
self.assertIn('some content', teaser_messages[0]['content'][0]['plain'])
|
|
|
|
self.assertIn(teaser_messages[0]['sender'], expected_participants)
|
|
|
|
|
2020-07-24 19:41:27 +02:00
|
|
|
@mock.patch('zerver.lib.digest.enough_traffic')
|
|
|
|
@mock.patch('zerver.lib.digest.send_future_email')
|
|
|
|
def test_guest_user_multiple_stream_sender(self,
|
|
|
|
mock_send_future_email: mock.MagicMock,
|
|
|
|
mock_enough_traffic: mock.MagicMock) -> None:
|
|
|
|
othello = self.example_user('othello')
|
|
|
|
hamlet = self.example_user('hamlet')
|
|
|
|
cordelia = self.example_user('cordelia')
|
|
|
|
polonius = self.example_user('polonius')
|
|
|
|
create_stream_if_needed(cordelia.realm, 'web_public_stream', is_web_public=True)
|
|
|
|
self.subscribe(othello, 'web_public_stream')
|
|
|
|
self.subscribe(hamlet, 'web_public_stream')
|
|
|
|
self.subscribe(cordelia, 'web_public_stream')
|
|
|
|
self.subscribe(polonius, 'web_public_stream')
|
|
|
|
|
|
|
|
one_day_ago = timezone_now() - datetime.timedelta(days=1)
|
|
|
|
Message.objects.all().update(date_sent=one_day_ago)
|
|
|
|
one_hour_ago = timezone_now() - datetime.timedelta(seconds=3600)
|
|
|
|
|
|
|
|
cutoff = time.mktime(one_hour_ago.timetuple())
|
|
|
|
|
|
|
|
senders = ['hamlet', 'cordelia', 'othello', 'desdemona']
|
|
|
|
self.simulate_stream_conversation('web_public_stream', senders)
|
|
|
|
|
|
|
|
flush_per_request_caches()
|
|
|
|
# When this test is run in isolation, one additional query is run which
|
|
|
|
# is equivalent to
|
|
|
|
# ContentType.objects.get(app_label='zerver', model='userprofile')
|
|
|
|
# This code is run when we call `confirmation.models.create_confirmation_link`.
|
|
|
|
# To trigger this, we call the one_click_unsubscribe_link function below.
|
|
|
|
one_click_unsubscribe_link(polonius, 'digest')
|
|
|
|
with queries_captured() as queries:
|
|
|
|
handle_digest_email(polonius.id, cutoff)
|
|
|
|
|
|
|
|
self.assert_length(queries, 6)
|
|
|
|
|
|
|
|
self.assertEqual(mock_send_future_email.call_count, 1)
|
|
|
|
kwargs = mock_send_future_email.call_args[1]
|
|
|
|
self.assertEqual(kwargs['to_user_ids'], [polonius.id])
|
|
|
|
|
|
|
|
new_stream_names = kwargs['context']['new_streams']['plain']
|
|
|
|
self.assertTrue('web_public_stream' in new_stream_names)
|
|
|
|
|
2019-03-03 07:14:58 +01:00
|
|
|
@mock.patch('zerver.lib.digest.enough_traffic')
|
|
|
|
@mock.patch('zerver.lib.digest.send_future_email')
|
|
|
|
def test_soft_deactivated_user_multiple_stream_senders(self,
|
|
|
|
mock_send_future_email: mock.MagicMock,
|
|
|
|
mock_enough_traffic: mock.MagicMock) -> None:
|
|
|
|
|
|
|
|
one_day_ago = timezone_now() - datetime.timedelta(days=1)
|
2019-08-28 02:43:19 +02:00
|
|
|
Message.objects.all().update(date_sent=one_day_ago)
|
2019-03-03 07:14:58 +01:00
|
|
|
|
|
|
|
othello = self.example_user('othello')
|
|
|
|
for stream in ['Verona', 'Scotland', 'Denmark']:
|
|
|
|
self.subscribe(othello, stream)
|
|
|
|
RealmAuditLog.objects.all().delete()
|
|
|
|
|
|
|
|
othello.long_term_idle = True
|
|
|
|
othello.save(update_fields=['long_term_idle'])
|
|
|
|
|
|
|
|
# Send messages to a stream and unsubscribe - subscribe from that stream
|
|
|
|
senders = ['hamlet', 'cordelia', 'iago', 'prospero', 'ZOE']
|
|
|
|
self.simulate_stream_conversation('Verona', senders)
|
|
|
|
self.unsubscribe(othello, 'Verona')
|
|
|
|
self.subscribe(othello, 'Verona')
|
|
|
|
|
|
|
|
# Send messages to other streams
|
|
|
|
self.simulate_stream_conversation('Scotland', senders)
|
|
|
|
self.simulate_stream_conversation('Denmark', senders)
|
|
|
|
|
|
|
|
one_hour_ago = timezone_now() - datetime.timedelta(seconds=3600)
|
|
|
|
cutoff = time.mktime(one_hour_ago.timetuple())
|
|
|
|
|
|
|
|
flush_per_request_caches()
|
2019-03-10 05:45:47 +01:00
|
|
|
# When this test is run in isolation, one additional query is run which
|
|
|
|
# is equivalent to
|
|
|
|
# ContentType.objects.get(app_label='zerver', model='userprofile')
|
|
|
|
# This code is run when we call `confirmation.models.create_confirmation_link`.
|
|
|
|
# To trigger this, we call the one_click_unsubscribe_link function below.
|
|
|
|
one_click_unsubscribe_link(othello, 'digest')
|
2019-03-03 07:14:58 +01:00
|
|
|
with queries_captured() as queries:
|
|
|
|
handle_digest_email(othello.id, cutoff)
|
|
|
|
|
2019-06-04 14:45:42 +02:00
|
|
|
# This can definitely be optimized; for both the huddle and
|
|
|
|
# stream cases, the get_narrow_url API ends up double-fetching
|
|
|
|
# some data because of how the functions are organized.
|
tests: Fix queries_captured to clear cache up front.
Before this change we were clearing the cache on
every SQL usage.
The code to do this was added in February 2017
in 6db4879f9c9fd6941d3aa2af6138ea75aa6675a6.
Now we clear the cache just one time, but before
the action/request under test.
Tests that want to count queries with a warm
cache now specify keep_cache_warm=True. Those
tests were particularly flawed before this change.
In general, the old code both over-counted and
under-counted queries.
It under-counted SQL usage for requests that were
able to pull some data out of a warm cache before
they did any SQL. Typically this would have bypassed
the initial query to get UserProfile, so you
will see several off-by-one fixes.
The old code over-counted SQL usage to the extent
that it's a rather extreme assumption that during
an action itself, the entries that you put into
the cache will get thrown away. And that's essentially
what the prior code simulated.
Now, it's still bad if an action keeps hitting the
cache for no reason, but it's not as bad as hitting
the database. There doesn't appear to be any evidence
of us doing something silly like fetching the same
data from the cache in a loop, but there are
opportunities to prevent second or third round
trips to the cache for the same object, if we
can re-structure the code so that the same caller
doesn't have two callees get the same data.
Note that for invites, we have some cache hits
that are due to the nature of how we serialize
data to our queue processor--we generally just
serialize ids, and then re-fetch objects when
we pop them off the queue.
2020-11-04 12:02:00 +01:00
|
|
|
self.assert_length(queries, 10)
|
2019-03-03 07:14:58 +01:00
|
|
|
|
|
|
|
self.assertEqual(mock_send_future_email.call_count, 1)
|
|
|
|
kwargs = mock_send_future_email.call_args[1]
|
|
|
|
self.assertEqual(kwargs['to_user_ids'], [othello.id])
|
|
|
|
|
|
|
|
hot_conversations = kwargs['context']['hot_conversations']
|
|
|
|
self.assertEqual(2, len(hot_conversations), [othello.id])
|
|
|
|
|
|
|
|
hot_convo = hot_conversations[0]
|
|
|
|
expected_participants = {
|
|
|
|
self.example_user(sender).full_name
|
|
|
|
for sender in senders
|
|
|
|
}
|
|
|
|
|
|
|
|
self.assertEqual(set(hot_convo['participants']), expected_participants)
|
|
|
|
self.assertEqual(hot_convo['count'], 5 - 2) # 5 messages, but 2 shown
|
|
|
|
teaser_messages = hot_convo['first_few_messages'][0]['senders']
|
|
|
|
self.assertIn('some content', teaser_messages[0]['content'][0]['plain'])
|
|
|
|
self.assertIn(teaser_messages[0]['sender'], expected_participants)
|
|
|
|
|
|
|
|
def test_exclude_subscription_modified_streams(self) -> None:
|
|
|
|
othello = self.example_user('othello')
|
|
|
|
for stream in ['Verona', 'Scotland', 'Denmark']:
|
|
|
|
self.subscribe(othello, stream)
|
|
|
|
|
|
|
|
# Delete all RealmAuditLogs to ignore any changes to subscriptions to
|
|
|
|
# streams done for the setup.
|
|
|
|
RealmAuditLog.objects.all().delete()
|
|
|
|
|
|
|
|
realm = othello.realm
|
2020-03-09 21:41:26 +01:00
|
|
|
stream_names = self.get_streams(othello)
|
2019-03-03 07:14:58 +01:00
|
|
|
stream_ids = {name: get_stream(name, realm).id for name in stream_names}
|
|
|
|
|
|
|
|
# Unsubscribe and subscribe from a stream
|
|
|
|
self.unsubscribe(othello, 'Verona')
|
|
|
|
self.subscribe(othello, 'Verona')
|
|
|
|
|
|
|
|
one_sec_ago = timezone_now() - datetime.timedelta(seconds=1)
|
|
|
|
|
|
|
|
filtered_stream_ids = exclude_subscription_modified_streams(
|
|
|
|
othello, list(stream_ids.values()), one_sec_ago)
|
|
|
|
self.assertNotIn(stream_ids['Verona'], filtered_stream_ids)
|
|
|
|
self.assertIn(stream_ids['Scotland'], filtered_stream_ids)
|
|
|
|
self.assertIn(stream_ids['Denmark'], filtered_stream_ids)
|
|
|
|
|
2018-03-09 00:27:01 +01:00
|
|
|
@mock.patch('zerver.lib.digest.queue_digest_recipient')
|
|
|
|
@mock.patch('zerver.lib.digest.timezone_now')
|
2018-03-09 00:16:44 +01:00
|
|
|
@override_settings(SEND_DIGEST_EMAILS=True)
|
2018-03-09 00:27:01 +01:00
|
|
|
def test_inactive_users_queued_for_digest(self, mock_django_timezone: mock.MagicMock,
|
|
|
|
mock_queue_digest_recipient: mock.MagicMock) -> None:
|
2019-05-08 07:58:26 +02:00
|
|
|
# Turn on realm digest emails for all realms
|
|
|
|
Realm.objects.update(digest_emails_enabled=True)
|
2018-03-09 00:27:01 +01:00
|
|
|
cutoff = timezone_now()
|
|
|
|
# Test Tuesday
|
|
|
|
mock_django_timezone.return_value = datetime.datetime(year=2016, month=1, day=5)
|
|
|
|
all_user_profiles = UserProfile.objects.filter(
|
|
|
|
is_active=True, is_bot=False, enable_digest_emails=True)
|
|
|
|
# Check that all users without an a UserActivity entry are considered
|
|
|
|
# inactive users and get enqueued.
|
|
|
|
enqueue_emails(cutoff)
|
|
|
|
self.assertEqual(mock_queue_digest_recipient.call_count, all_user_profiles.count())
|
|
|
|
mock_queue_digest_recipient.reset_mock()
|
2018-08-01 12:51:35 +02:00
|
|
|
for realm in Realm.objects.filter(deactivated=False, digest_emails_enabled=True):
|
2018-03-09 00:27:01 +01:00
|
|
|
user_profiles = all_user_profiles.filter(realm=realm)
|
|
|
|
for user_profile in user_profiles:
|
|
|
|
UserActivity.objects.create(
|
|
|
|
last_visit=cutoff - datetime.timedelta(days=1),
|
|
|
|
user_profile=user_profile,
|
|
|
|
count=0,
|
|
|
|
client=get_client('test_client'))
|
|
|
|
# Check that inactive users are enqueued
|
|
|
|
enqueue_emails(cutoff)
|
|
|
|
self.assertEqual(mock_queue_digest_recipient.call_count, all_user_profiles.count())
|
|
|
|
|
2018-03-09 00:16:44 +01:00
|
|
|
@mock.patch('zerver.lib.digest.queue_digest_recipient')
|
|
|
|
@mock.patch('zerver.lib.digest.timezone_now')
|
|
|
|
def test_disabled(self, mock_django_timezone: mock.MagicMock,
|
|
|
|
mock_queue_digest_recipient: mock.MagicMock) -> None:
|
|
|
|
cutoff = timezone_now()
|
|
|
|
# A Tuesday
|
|
|
|
mock_django_timezone.return_value = datetime.datetime(year=2016, month=1, day=5)
|
|
|
|
enqueue_emails(cutoff)
|
|
|
|
mock_queue_digest_recipient.assert_not_called()
|
|
|
|
|
2018-03-09 00:27:01 +01:00
|
|
|
@mock.patch('zerver.lib.digest.enough_traffic', return_value=True)
|
|
|
|
@mock.patch('zerver.lib.digest.timezone_now')
|
2018-03-09 00:16:44 +01:00
|
|
|
@override_settings(SEND_DIGEST_EMAILS=True)
|
2018-03-09 00:27:01 +01:00
|
|
|
def test_active_users_not_enqueued(self, mock_django_timezone: mock.MagicMock,
|
|
|
|
mock_enough_traffic: mock.MagicMock) -> None:
|
2019-05-08 07:58:26 +02:00
|
|
|
# Turn on realm digest emails for all realms
|
|
|
|
Realm.objects.update(digest_emails_enabled=True)
|
2018-03-09 00:27:01 +01:00
|
|
|
cutoff = timezone_now()
|
|
|
|
# A Tuesday
|
|
|
|
mock_django_timezone.return_value = datetime.datetime(year=2016, month=1, day=5)
|
2018-08-01 12:51:35 +02:00
|
|
|
realms = Realm.objects.filter(deactivated=False, digest_emails_enabled=True)
|
2018-03-09 00:27:01 +01:00
|
|
|
for realm in realms:
|
|
|
|
user_profiles = UserProfile.objects.filter(realm=realm)
|
|
|
|
for counter, user_profile in enumerate(user_profiles, 1):
|
|
|
|
UserActivity.objects.create(
|
|
|
|
last_visit=cutoff + datetime.timedelta(days=1),
|
|
|
|
user_profile=user_profile,
|
|
|
|
count=0,
|
|
|
|
client=get_client('test_client'))
|
|
|
|
# Check that an active user is not enqueued
|
|
|
|
with mock.patch('zerver.lib.digest.queue_digest_recipient') as mock_queue_digest_recipient:
|
|
|
|
enqueue_emails(cutoff)
|
|
|
|
self.assertEqual(mock_queue_digest_recipient.call_count, 0)
|
|
|
|
|
|
|
|
@mock.patch('zerver.lib.digest.queue_digest_recipient')
|
|
|
|
@mock.patch('zerver.lib.digest.timezone_now')
|
2018-03-09 00:16:44 +01:00
|
|
|
@override_settings(SEND_DIGEST_EMAILS=True)
|
2018-03-09 00:27:01 +01:00
|
|
|
def test_only_enqueue_on_valid_day(self, mock_django_timezone: mock.MagicMock,
|
|
|
|
mock_queue_digest_recipient: mock.MagicMock) -> None:
|
|
|
|
# Not a Tuesday
|
|
|
|
mock_django_timezone.return_value = datetime.datetime(year=2016, month=1, day=6)
|
|
|
|
|
|
|
|
# Check that digests are not sent on days other than Tuesday.
|
|
|
|
cutoff = timezone_now()
|
|
|
|
enqueue_emails(cutoff)
|
|
|
|
self.assertEqual(mock_queue_digest_recipient.call_count, 0)
|
|
|
|
|
|
|
|
@mock.patch('zerver.lib.digest.queue_digest_recipient')
|
|
|
|
@mock.patch('zerver.lib.digest.timezone_now')
|
2018-03-09 00:16:44 +01:00
|
|
|
@override_settings(SEND_DIGEST_EMAILS=True)
|
2018-03-09 00:27:01 +01:00
|
|
|
def test_no_email_digest_for_bots(self, mock_django_timezone: mock.MagicMock,
|
|
|
|
mock_queue_digest_recipient: mock.MagicMock) -> None:
|
2019-05-08 07:58:26 +02:00
|
|
|
# Turn on realm digest emails for all realms
|
|
|
|
Realm.objects.update(digest_emails_enabled=True)
|
2018-03-09 00:27:01 +01:00
|
|
|
cutoff = timezone_now()
|
|
|
|
# A Tuesday
|
|
|
|
mock_django_timezone.return_value = datetime.datetime(year=2016, month=1, day=5)
|
2020-07-16 14:10:43 +02:00
|
|
|
bot = do_create_user(
|
|
|
|
'some_bot@example.com',
|
|
|
|
'password',
|
|
|
|
get_realm('zulip'),
|
|
|
|
'some_bot',
|
|
|
|
bot_type=UserProfile.DEFAULT_BOT,
|
|
|
|
)
|
2018-03-09 00:27:01 +01:00
|
|
|
UserActivity.objects.create(
|
|
|
|
last_visit=cutoff - datetime.timedelta(days=1),
|
|
|
|
user_profile=bot,
|
|
|
|
count=0,
|
|
|
|
client=get_client('test_client'))
|
|
|
|
|
|
|
|
# Check that bots are not sent emails
|
|
|
|
enqueue_emails(cutoff)
|
|
|
|
for arg in mock_queue_digest_recipient.call_args_list:
|
|
|
|
user = arg[0][0]
|
|
|
|
self.assertNotEqual(user.id, bot.id)
|
|
|
|
|
|
|
|
@mock.patch('zerver.lib.digest.timezone_now')
|
2018-03-09 00:16:44 +01:00
|
|
|
@override_settings(SEND_DIGEST_EMAILS=True)
|
2018-03-09 00:27:01 +01:00
|
|
|
def test_new_stream_link(self, mock_django_timezone: mock.MagicMock) -> None:
|
2020-06-04 03:32:59 +02:00
|
|
|
cutoff = datetime.datetime(year=2017, month=11, day=1, tzinfo=datetime.timezone.utc)
|
|
|
|
mock_django_timezone.return_value = datetime.datetime(year=2017, month=11, day=5, tzinfo=datetime.timezone.utc)
|
2018-03-09 00:27:01 +01:00
|
|
|
cordelia = self.example_user('cordelia')
|
|
|
|
stream_id = create_stream_if_needed(cordelia.realm, 'New stream')[0].id
|
|
|
|
new_stream = gather_new_streams(cordelia, cutoff)[1]
|
2020-06-09 00:25:09 +02:00
|
|
|
expected_html = f"<a href='http://zulip.testserver/#narrow/stream/{stream_id}-New-stream'>New stream</a>"
|
2018-03-09 00:27:01 +01:00
|
|
|
self.assertIn(expected_html, new_stream['html'])
|
2018-06-02 15:45:27 +02:00
|
|
|
|
2019-03-03 07:14:58 +01:00
|
|
|
def simulate_stream_conversation(self, stream: str, senders: List[str]) -> List[int]:
|
|
|
|
client = 'website' # this makes `sent_by_human` return True
|
|
|
|
sending_client = get_client(client)
|
|
|
|
message_ids = [] # List[int]
|
|
|
|
for sender_name in senders:
|
2020-03-07 11:43:05 +01:00
|
|
|
sender = self.example_user(sender_name)
|
2020-06-09 00:25:09 +02:00
|
|
|
content = f'some content for {stream} from {sender_name}'
|
2020-03-07 11:43:05 +01:00
|
|
|
message_id = self.send_stream_message(sender, stream, content)
|
2019-03-03 07:14:58 +01:00
|
|
|
message_ids.append(message_id)
|
|
|
|
Message.objects.filter(id__in=message_ids).update(sending_client=sending_client)
|
|
|
|
return message_ids
|
|
|
|
|
2018-08-12 22:09:34 +02:00
|
|
|
class TestDigestContentInBrowser(ZulipTestCase):
    """Cover the /digest/ endpoint that renders digest content on the web."""

    def test_get_digest_content_in_browser(self) -> None:
        # Any logged-in user should be able to load their digest page.
        self.login('hamlet')
        result = self.client_get("/digest/")
        self.assert_in_success_response(["Click here to log in to Zulip and catch up."], result)
|