2017-03-08 12:18:27 +01:00
|
|
|
|
|
|
|
import os
|
|
|
|
import time
|
|
|
|
import ujson
|
2017-09-15 09:38:12 +02:00
|
|
|
import smtplib
|
2017-03-08 12:18:27 +01:00
|
|
|
|
|
|
|
from django.conf import settings
|
|
|
|
from django.http import HttpResponse
|
|
|
|
from django.test import TestCase
|
2017-10-06 07:15:58 +02:00
|
|
|
from mock import patch, MagicMock
|
2017-03-08 12:18:27 +01:00
|
|
|
from typing import Any, Callable, Dict, List, Mapping, Tuple
|
|
|
|
|
|
|
|
from zerver.lib.test_helpers import simulated_queue_client
|
|
|
|
from zerver.lib.test_classes import ZulipTestCase
|
2017-12-07 00:58:34 +01:00
|
|
|
from zerver.models import get_client, UserActivity, PreregistrationUser
|
2017-03-08 12:18:27 +01:00
|
|
|
from zerver.worker import queue_processors
|
2017-11-10 15:00:45 +01:00
|
|
|
from zerver.worker.queue_processors import (
|
|
|
|
get_active_worker_queues,
|
|
|
|
QueueProcessingWorker,
|
|
|
|
LoopQueueProcessingWorker,
|
2017-11-15 15:27:41 +01:00
|
|
|
MissedMessageWorker,
|
2017-11-10 15:00:45 +01:00
|
|
|
)
|
2017-03-08 12:18:27 +01:00
|
|
|
|
2017-11-15 15:27:41 +01:00
|
|
|
Event = Dict[str, Any]


# Raised from a mocked time.sleep to break out of
# LoopQueueProcessingWorker subclasses, whose start() would
# otherwise loop forever in these tests.
class AbortLoop(Exception):
    pass
2017-05-07 17:21:26 +02:00
|
|
|
class WorkerTest(ZulipTestCase):
    class FakeClient:
        """Minimal in-memory stand-in for the queue client interface the
        workers use: consumers register per-queue callbacks, events are
        appended to `queue`, and start_consuming()/drain_queue() deliver
        them."""

        def __init__(self) -> None:
            # Maps queue name -> registered consumer callback.
            self.consumers = {}  # type: Dict[str, Callable[[Dict[str, Any]], None]]
            # Pending (queue_name, event) pairs, in publish order.
            self.queue = []  # type: List[Tuple[str, Dict[str, Any]]]

        def register_json_consumer(self,
                                   queue_name: str,
                                   callback: Callable[[Dict[str, Any]], None]) -> None:
            self.consumers[queue_name] = callback

        def start_consuming(self) -> None:
            # Deliver every queued event to the consumer registered for
            # its queue, in publish order.
            for queue_name, data in self.queue:
                callback = self.consumers[queue_name]
                callback(data)

        def drain_queue(self, queue_name: str, json: bool) -> List[Event]:
            assert json
            # Return only the events for the requested queue.  (The
            # original comprehension shadowed `queue_name` and so
            # drained every queue's events; that only worked because
            # these tests never mix queues in a single client.)
            events = [dct for (name, dct) in self.queue if name == queue_name]

            # IMPORTANT!
            # Remove the drained events so a second drain cannot return
            # them again; double draining was a bug at one point.
            self.queue = [item for item in self.queue if item[0] != queue_name]

            return events
def test_missed_message_worker(self) -> None:
|
|
|
|
cordelia = self.example_user('cordelia')
|
|
|
|
hamlet = self.example_user('hamlet')
|
|
|
|
othello = self.example_user('othello')
|
|
|
|
|
|
|
|
hamlet1_msg_id = self.send_personal_message(
|
|
|
|
from_email=cordelia.email,
|
|
|
|
to_email=hamlet.email,
|
|
|
|
content='hi hamlet',
|
|
|
|
)
|
|
|
|
|
|
|
|
hamlet2_msg_id = self.send_personal_message(
|
|
|
|
from_email=cordelia.email,
|
|
|
|
to_email=hamlet.email,
|
|
|
|
content='goodbye hamlet',
|
|
|
|
)
|
|
|
|
|
|
|
|
othello_msg_id = self.send_personal_message(
|
|
|
|
from_email=cordelia.email,
|
|
|
|
to_email=othello.email,
|
|
|
|
content='where art thou, othello?',
|
|
|
|
)
|
|
|
|
|
|
|
|
events = [
|
|
|
|
dict(user_profile_id=hamlet.id, message_id=hamlet1_msg_id),
|
|
|
|
dict(user_profile_id=hamlet.id, message_id=hamlet2_msg_id),
|
|
|
|
dict(user_profile_id=othello.id, message_id=othello_msg_id),
|
|
|
|
]
|
|
|
|
|
|
|
|
fake_client = self.FakeClient()
|
|
|
|
for event in events:
|
|
|
|
fake_client.queue.append(('missedmessage_emails', event))
|
|
|
|
|
|
|
|
mmw = MissedMessageWorker()
|
|
|
|
|
|
|
|
time_mock = patch(
|
|
|
|
'zerver.worker.queue_processors.time.sleep',
|
|
|
|
side_effect=AbortLoop,
|
|
|
|
)
|
|
|
|
|
|
|
|
send_mock = patch(
|
|
|
|
'zerver.lib.notifications.do_send_missedmessage_events_reply_in_zulip'
|
|
|
|
)
|
|
|
|
|
|
|
|
with send_mock as sm, time_mock as tm:
|
|
|
|
with simulated_queue_client(lambda: fake_client):
|
|
|
|
try:
|
|
|
|
mmw.setup()
|
|
|
|
mmw.start()
|
|
|
|
except AbortLoop:
|
|
|
|
pass
|
|
|
|
|
|
|
|
self.assertEqual(tm.call_args[0][0], 120) # should sleep two minutes
|
|
|
|
|
|
|
|
args = [c[0] for c in sm.call_args_list]
|
|
|
|
arg_dict = {
|
|
|
|
arg[0].id: dict(
|
|
|
|
missed_messages=arg[1],
|
|
|
|
count=arg[2],
|
|
|
|
)
|
|
|
|
for arg in args
|
|
|
|
}
|
|
|
|
|
|
|
|
hamlet_info = arg_dict[hamlet.id]
|
|
|
|
self.assertEqual(hamlet_info['count'], 2)
|
|
|
|
self.assertEqual(
|
|
|
|
{m.content for m in hamlet_info['missed_messages']},
|
|
|
|
{'hi hamlet', 'goodbye hamlet'},
|
|
|
|
)
|
|
|
|
|
|
|
|
othello_info = arg_dict[othello.id]
|
|
|
|
self.assertEqual(othello_info['count'], 1)
|
|
|
|
self.assertEqual(
|
|
|
|
{m.content for m in othello_info['missed_messages']},
|
|
|
|
{'where art thou, othello?'}
|
|
|
|
)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_mirror_worker(self) -> None:
|
2017-04-05 11:46:14 +02:00
|
|
|
fake_client = self.FakeClient()
|
|
|
|
data = [
|
|
|
|
dict(
|
|
|
|
message=u'\xf3test',
|
|
|
|
time=time.time(),
|
2017-05-24 05:08:49 +02:00
|
|
|
rcpt_to=self.example_email('hamlet'),
|
2017-04-05 11:46:14 +02:00
|
|
|
),
|
|
|
|
dict(
|
|
|
|
message='\xf3test',
|
|
|
|
time=time.time(),
|
2017-05-24 05:08:49 +02:00
|
|
|
rcpt_to=self.example_email('hamlet'),
|
2017-04-05 11:46:14 +02:00
|
|
|
),
|
|
|
|
dict(
|
|
|
|
message='test',
|
|
|
|
time=time.time(),
|
2017-05-24 05:08:49 +02:00
|
|
|
rcpt_to=self.example_email('hamlet'),
|
2017-04-05 11:46:14 +02:00
|
|
|
),
|
|
|
|
]
|
|
|
|
for element in data:
|
|
|
|
fake_client.queue.append(('email_mirror', element))
|
|
|
|
|
|
|
|
with patch('zerver.worker.queue_processors.mirror_email'):
|
|
|
|
with simulated_queue_client(lambda: fake_client):
|
|
|
|
worker = queue_processors.MirrorWorker()
|
|
|
|
worker.setup()
|
|
|
|
worker.start()
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_email_sending_worker_retries(self) -> None:
|
2017-09-15 09:38:12 +02:00
|
|
|
"""Tests the retry_send_email_failures decorator to make sure it
|
|
|
|
retries sending the email 3 times and then gives up."""
|
|
|
|
fake_client = self.FakeClient()
|
|
|
|
|
2017-10-28 03:14:13 +02:00
|
|
|
data = {'test': 'test', 'id': 'test_missed'}
|
2017-09-15 09:38:12 +02:00
|
|
|
fake_client.queue.append(('missedmessage_email_senders', data))
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def fake_publish(queue_name: str,
|
|
|
|
event: Dict[str, Any],
|
|
|
|
processor: Callable[[Any], None]) -> None:
|
2017-09-15 09:38:12 +02:00
|
|
|
fake_client.queue.append((queue_name, event))
|
|
|
|
|
|
|
|
with simulated_queue_client(lambda: fake_client):
|
|
|
|
worker = queue_processors.MissedMessageSendingWorker()
|
|
|
|
worker.setup()
|
|
|
|
with patch('zerver.worker.queue_processors.send_email_from_dict',
|
|
|
|
side_effect=smtplib.SMTPServerDisconnected), \
|
|
|
|
patch('zerver.lib.queue.queue_json_publish',
|
|
|
|
side_effect=fake_publish), \
|
|
|
|
patch('logging.exception'):
|
|
|
|
worker.start()
|
|
|
|
|
|
|
|
self.assertEqual(data['failed_tries'], 4)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_signups_worker_retries(self) -> None:
|
2017-10-06 07:15:58 +02:00
|
|
|
"""Tests the retry logic of signups queue."""
|
|
|
|
fake_client = self.FakeClient()
|
|
|
|
|
|
|
|
user_id = self.example_user('hamlet').id
|
2017-10-28 03:14:13 +02:00
|
|
|
data = {'user_id': user_id, 'id': 'test_missed'}
|
2017-10-06 07:15:58 +02:00
|
|
|
fake_client.queue.append(('signups', data))
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def fake_publish(queue_name: str, event: Dict[str, Any], processor: Callable[[Any], None]) -> None:
|
2017-10-06 07:15:58 +02:00
|
|
|
fake_client.queue.append((queue_name, event))
|
|
|
|
|
|
|
|
fake_response = MagicMock()
|
|
|
|
fake_response.status_code = 400
|
|
|
|
fake_response.text = ujson.dumps({'title': ''})
|
|
|
|
with simulated_queue_client(lambda: fake_client):
|
|
|
|
worker = queue_processors.SignupWorker()
|
|
|
|
worker.setup()
|
|
|
|
with patch('zerver.worker.queue_processors.requests.post',
|
|
|
|
return_value=fake_response), \
|
|
|
|
patch('zerver.lib.queue.queue_json_publish',
|
|
|
|
side_effect=fake_publish), \
|
|
|
|
patch('logging.info'), \
|
|
|
|
self.settings(MAILCHIMP_API_KEY='one-two',
|
|
|
|
PRODUCTION=True,
|
|
|
|
ZULIP_FRIENDS_LIST_ID='id'):
|
|
|
|
worker.start()
|
|
|
|
|
|
|
|
self.assertEqual(data['failed_tries'], 4)
|
|
|
|
|
2017-12-07 00:58:34 +01:00
|
|
|
def test_invites_worker(self) -> None:
|
|
|
|
fake_client = self.FakeClient()
|
|
|
|
invitor = self.example_user('iago')
|
2017-12-05 09:01:41 +01:00
|
|
|
prereg_alice = PreregistrationUser.objects.create(
|
|
|
|
email=self.nonreg_email('alice'), referred_by=invitor, realm=invitor.realm)
|
|
|
|
prereg_bob = PreregistrationUser.objects.create(
|
2017-12-07 00:58:34 +01:00
|
|
|
email=self.nonreg_email('bob'), referred_by=invitor, realm=invitor.realm)
|
|
|
|
data = [
|
2017-12-05 09:01:41 +01:00
|
|
|
dict(prereg_id=prereg_alice.id, referrer_id=invitor.id, email_body=None),
|
|
|
|
# Nonexistent prereg_id, as if the invitation was deleted
|
|
|
|
dict(prereg_id=-1, referrer_id=invitor.id, email_body=None),
|
|
|
|
# Form with `email` is from versions up to Zulip 1.7.1
|
2017-12-07 00:58:34 +01:00
|
|
|
dict(email=self.nonreg_email('bob'), referrer_id=invitor.id, email_body=None),
|
|
|
|
]
|
|
|
|
for element in data:
|
|
|
|
fake_client.queue.append(('invites', element))
|
|
|
|
|
|
|
|
with simulated_queue_client(lambda: fake_client):
|
|
|
|
worker = queue_processors.ConfirmationEmailWorker()
|
|
|
|
worker.setup()
|
|
|
|
with patch('zerver.worker.queue_processors.do_send_confirmation_email'), \
|
|
|
|
patch('zerver.worker.queue_processors.create_confirmation_link'), \
|
|
|
|
patch('zerver.worker.queue_processors.send_future_email') \
|
|
|
|
as send_mock, \
|
|
|
|
patch('logging.info'):
|
|
|
|
worker.start()
|
2017-12-05 09:01:41 +01:00
|
|
|
self.assertEqual(send_mock.call_count, 2)
|
2017-12-07 00:58:34 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_UserActivityWorker(self) -> None:
|
2017-03-08 12:18:27 +01:00
|
|
|
fake_client = self.FakeClient()
|
|
|
|
|
2017-05-07 17:21:26 +02:00
|
|
|
user = self.example_user('hamlet')
|
2017-03-08 12:18:27 +01:00
|
|
|
UserActivity.objects.filter(
|
|
|
|
user_profile = user.id,
|
|
|
|
client = get_client('ios')
|
|
|
|
).delete()
|
|
|
|
|
|
|
|
data = dict(
|
|
|
|
user_profile_id = user.id,
|
|
|
|
client = 'ios',
|
|
|
|
time = time.time(),
|
|
|
|
query = 'send_message'
|
|
|
|
)
|
|
|
|
fake_client.queue.append(('user_activity', data))
|
|
|
|
|
|
|
|
with simulated_queue_client(lambda: fake_client):
|
|
|
|
worker = queue_processors.UserActivityWorker()
|
|
|
|
worker.setup()
|
|
|
|
worker.start()
|
|
|
|
activity_records = UserActivity.objects.filter(
|
|
|
|
user_profile = user.id,
|
|
|
|
client = get_client('ios')
|
|
|
|
)
|
|
|
|
self.assertTrue(len(activity_records), 1)
|
|
|
|
self.assertTrue(activity_records[0].count, 1)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_error_handling(self) -> None:
|
2017-03-08 12:18:27 +01:00
|
|
|
processed = []
|
|
|
|
|
|
|
|
@queue_processors.assign_queue('unreliable_worker')
|
|
|
|
class UnreliableWorker(queue_processors.QueueProcessingWorker):
|
2017-11-05 10:51:25 +01:00
|
|
|
def consume(self, data: Mapping[str, Any]) -> None:
|
2017-03-08 12:18:27 +01:00
|
|
|
if data["type"] == 'unexpected behaviour':
|
|
|
|
raise Exception('Worker task not performing as expected!')
|
|
|
|
processed.append(data["type"])
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def _log_problem(self) -> None:
|
2017-03-08 12:18:27 +01:00
|
|
|
|
|
|
|
# keep the tests quiet
|
|
|
|
pass
|
|
|
|
|
|
|
|
fake_client = self.FakeClient()
|
|
|
|
for msg in ['good', 'fine', 'unexpected behaviour', 'back to normal']:
|
|
|
|
fake_client.queue.append(('unreliable_worker', {'type': msg}))
|
|
|
|
|
|
|
|
fn = os.path.join(settings.QUEUE_ERROR_DIR, 'unreliable_worker.errors')
|
|
|
|
try:
|
|
|
|
os.remove(fn)
|
|
|
|
except OSError: # nocoverage # error handling for the directory not existing
|
|
|
|
pass
|
|
|
|
|
|
|
|
with simulated_queue_client(lambda: fake_client):
|
|
|
|
worker = UnreliableWorker()
|
|
|
|
worker.setup()
|
|
|
|
worker.start()
|
|
|
|
|
|
|
|
self.assertEqual(processed, ['good', 'fine', 'back to normal'])
|
|
|
|
line = open(fn).readline().strip()
|
|
|
|
event = ujson.loads(line.split('\t')[1])
|
|
|
|
self.assertEqual(event["type"], 'unexpected behaviour')
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_worker_noname(self) -> None:
|
2017-03-08 12:18:27 +01:00
|
|
|
class TestWorker(queue_processors.QueueProcessingWorker):
|
2017-11-05 10:51:25 +01:00
|
|
|
def __init__(self) -> None:
|
2017-10-27 08:28:23 +02:00
|
|
|
super().__init__()
|
2017-03-08 12:18:27 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def consume(self, data: Mapping[str, Any]) -> None:
|
2017-03-08 12:18:27 +01:00
|
|
|
pass # nocoverage # this is intentionally not called
|
|
|
|
with self.assertRaises(queue_processors.WorkerDeclarationException):
|
|
|
|
TestWorker()
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_worker_noconsume(self) -> None:
|
2017-03-08 12:18:27 +01:00
|
|
|
@queue_processors.assign_queue('test_worker')
|
|
|
|
class TestWorker(queue_processors.QueueProcessingWorker):
|
2017-11-05 10:51:25 +01:00
|
|
|
def __init__(self) -> None:
|
2017-10-27 08:28:23 +02:00
|
|
|
super().__init__()
|
2017-03-08 12:18:27 +01:00
|
|
|
|
|
|
|
with self.assertRaises(queue_processors.WorkerDeclarationException):
|
|
|
|
worker = TestWorker()
|
|
|
|
worker.consume({})
|
2017-11-10 15:00:45 +01:00
|
|
|
|
|
|
|
def test_get_active_worker_queues(self) -> None:
|
|
|
|
worker_queue_count = (len(QueueProcessingWorker.__subclasses__()) +
|
|
|
|
len(LoopQueueProcessingWorker.__subclasses__()) - 1)
|
|
|
|
self.assertEqual(worker_queue_count, len(get_active_worker_queues()))
|
|
|
|
self.assertEqual(1, len(get_active_worker_queues(queue_type='test')))
|