from typing import Any, Dict, List
from unittest import mock

import orjson
from django.test import override_settings
from pika.exceptions import AMQPConnectionError, ConnectionClosed

from zerver.lib.queue import (
    SimpleQueueClient,
    TornadoQueueClient,
    get_queue_client,
    queue_json_publish,
)
from zerver.lib.test_classes import ZulipTestCase


class TestTornadoQueueClient(ZulipTestCase):
    @mock.patch("zerver.lib.queue.ExceptionFreeTornadoConnection", autospec=True)
    def test_on_open_closed(self, mock_cxn: mock.MagicMock) -> None:
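        # If the broker connection is already closed when we try to open a
        # channel, TornadoQueueClient should log a warning rather than crash.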
        with self.assertLogs("zulip.queue", "WARNING") as m:
            mock_cxn().channel.side_effect = ConnectionClosed(500, "test")
            connection = TornadoQueueClient()
            connection._on_open(mock.MagicMock())
            self.assertEqual(
                m.output,
                [
                    "WARNING:zulip.queue:TornadoQueueClient couldn't open channel: connection already closed"
                ],
            )


class TestQueueImplementation(ZulipTestCase):
    @override_settings(USING_RABBITMQ=True)
    def test_register_consumer(self) -> None:
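        # A single published event should be delivered to the consumer
        # callback exactly once.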
        output = []

        queue_client = get_queue_client()
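        # Record the one delivered event and stop consuming so that
        # start_json_consumer returns.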
        def collect(events: List[Dict[str, Any]]) -> None:
            assert isinstance(queue_client, SimpleQueueClient)
            assert len(events) == 1
            output.append(events[0])
            queue_client.stop_consuming()
        queue_json_publish("test_suite", {"event": "my_event"})

        queue_client.start_json_consumer("test_suite", collect)
        self.assert_length(output, 1)
        self.assertEqual(output[0]["event"], "my_event")
    @override_settings(USING_RABBITMQ=True)
    def test_register_consumer_nack(self) -> None:
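        # If the consumer callback raises, the message is nacked and
        # redelivered; the event should still end up processed exactly once.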
        output = []
        count = 0

        queue_client = get_queue_client()
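        # Fail on the first delivery to force a nack, then record the
        # redelivered event on the second attempt.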
2020-10-10 05:27:42 +02:00
|
|
|
def collect(events: List[Dict[str, Any]]) -> None:
|
2021-08-03 03:49:54 +02:00
|
|
|
assert isinstance(queue_client, SimpleQueueClient)
|
2020-10-10 05:27:42 +02:00
|
|
|
assert len(events) == 1
|
2018-05-16 03:24:45 +02:00
|
|
|
queue_client.stop_consuming()
|
|
|
|
nonlocal count
|
|
|
|
count += 1
|
|
|
|
if count == 1:
|
|
|
|
raise Exception("Make me nack!")
|
2020-10-10 05:27:42 +02:00
|
|
|
output.append(events[0])
        queue_json_publish("test_suite", {"event": "my_event"})
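        # The first consumer run aborts when collect raises; run the
        # consumer again to receive the redelivered message.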
        try:
            queue_client.start_json_consumer("test_suite", collect)
        except Exception:
            queue_client.start_json_consumer("test_suite", collect)
        # Confirm that we processed the event fully once
        self.assertEqual(count, 2)
        self.assert_length(output, 1)
        self.assertEqual(output[0]["event"], "my_event")
    @override_settings(USING_RABBITMQ=True)
    def test_queue_error_json(self) -> None:
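        # queue_json_publish should survive a dropped connection on the
        # first publish attempt by reconnecting and publishing again.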
        queue_client = get_queue_client()
        assert isinstance(queue_client, SimpleQueueClient)
        actual_publish = queue_client.publish

        self.counter = 0
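        # Raise AMQPConnectionError on the first publish only; later calls
        # go through the real publish method.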
        def throw_connection_error_once(self_obj: Any, *args: Any, **kwargs: Any) -> None:
            self.counter += 1
            if self.counter <= 1:
                raise AMQPConnectionError("test")
            actual_publish(*args, **kwargs)
        with mock.patch(
            "zerver.lib.queue.SimpleQueueClient.publish", throw_connection_error_once
        ), self.assertLogs("zulip.queue", level="WARN") as warn_logs:
            queue_json_publish("test_suite", {"event": "my_event"})
        self.assertEqual(
            warn_logs.output,
            ["WARNING:zulip.queue:Failed to send to rabbitmq, trying to reconnect and send again"],
        )
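        # Despite the retry, the event should be delivered to the queue exactly once.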
        assert queue_client.channel
        method, header, message = queue_client.channel.basic_get("test_suite")
        assert method is not None
        assert method.delivery_tag is not None
        assert message is not None
        queue_client.channel.basic_ack(method.delivery_tag)
        result = orjson.loads(message)
        self.assertEqual(result["event"], "my_event")
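        # The queue should now be empty.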
        method, header, message = queue_client.channel.basic_get("test_suite")
        assert message is None
    @override_settings(USING_RABBITMQ=True)
    def setUp(self) -> None:
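        # Make sure the test_suite queue exists and starts empty for each test.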
        queue_client = get_queue_client()
        assert queue_client.channel
        queue_client.channel.queue_declare("test_suite", durable=True)
        queue_client.channel.queue_purge("test_suite")
        super().setUp()