import asyncio
import urllib.parse
from functools import wraps
from typing import Any, Awaitable, Callable, Dict, Optional, TypeVar
from unittest import TestResult, mock

import orjson
from asgiref.sync import async_to_sync, sync_to_async
from django.conf import settings
from django.core import signals
from django.db import close_old_connections
from django.test import override_settings
from tornado.httpclient import HTTPResponse
from tornado.ioloop import IOLoop
from tornado.platform.asyncio import AsyncIOMainLoop
from tornado.testing import AsyncHTTPTestCase, AsyncTestCase
from tornado.web import Application
from typing_extensions import ParamSpec

from zerver.lib.test_classes import ZulipTestCase
from zerver.tornado import event_queue
from zerver.tornado.application import create_tornado_application
from zerver.tornado.event_queue import process_event

P = ParamSpec("P")
T = TypeVar("T")

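# Run an async callable to completion from synchronous code (via asgiref's
# async_to_sync), so that unittest can invoke async test methods and hooks
# directly.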
def async_to_sync_decorator(f: Callable[P, Awaitable[T]]) -> Callable[P, T]:
    @wraps(f)
    def wrapped(*args: P.args, **kwargs: P.kwargs) -> T:
        return async_to_sync(f)(*args, **kwargs)

    return wrapped

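# Await a blocking callable (typically Django ORM code) without blocking the
# event loop, by handing it to a worker thread through sync_to_async.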
async def in_django_thread(f: Callable[[], T]) -> T:
    return await asyncio.create_task(sync_to_async(f)())

class TornadoWebTestCase(AsyncHTTPTestCase, ZulipTestCase):
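    """Shared base class: starts the Zulip Tornado application on a test port
    (via AsyncHTTPTestCase) while keeping all of ZulipTestCase's helpers."""
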
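    # setUp and tearDown are coroutines bridged back to unittest with
    # async_to_sync_decorator, so they run on the same event loop as the tests.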
    @async_to_sync_decorator
    async def setUp(self) -> None:
        super().setUp()
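        # Django's request signals normally close "old" database connections at
        # the start and end of each request; disconnect those handlers here,
        # presumably so the test's shared database connection is not closed
        # mid-test.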
        signals.request_started.disconnect(close_old_connections)
        signals.request_finished.disconnect(close_old_connections)
        self.session_cookie: Optional[Dict[str, str]] = None

    @async_to_sync_decorator
    async def tearDown(self) -> None:
        # Skip tornado.testing.AsyncTestCase.tearDown because it tries to kill
        # the current task.
        super(AsyncTestCase, self).tearDown()

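    # Create a fresh asyncio event loop (force_new_loop=True) and run the
    # synchronous unittest machinery on a separate thread
    # (sync_to_async(..., thread_sensitive=False)); async_to_sync calls made
    # during the test then hop back onto that loop.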
    def run(self, result: Optional[TestResult] = None) -> Optional[TestResult]:
        return async_to_sync(
            sync_to_async(super().run, thread_sensitive=False), force_new_loop=True
        )(result)

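    # Tornado's test harness asks each test for an IOLoop; return one backed by
    # the asyncio event loop instead of a separate, independent loop.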
    def get_new_ioloop(self) -> IOLoop:
        return AsyncIOMainLoop()

    @override_settings(DEBUG=False)
    def get_app(self) -> Application:
        return create_tornado_application()

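    # Issue a GET against the Tornado test server, attaching the logged-in
    # session cookie and the standard test headers.  A Django-test-style
    # HTTP_HOST kwarg is translated into a real Host header.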
    async def tornado_client_get(self, path: str, **kwargs: Any) -> HTTPResponse:
        self.add_session_cookie(kwargs)
        self.set_http_headers(kwargs, skip_user_agent=True)
        if "HTTP_HOST" in kwargs:
            kwargs["headers"]["Host"] = kwargs["HTTP_HOST"]
            del kwargs["HTTP_HOST"]
        return await self.http_client.fetch(self.get_url(path), method="GET", **kwargs)

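    # Like tornado_client_get, but for an arbitrary HTTP method.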
    async def fetch_async(self, method: str, path: str, **kwargs: Any) -> HTTPResponse:
        self.add_session_cookie(kwargs)
        self.set_http_headers(kwargs, skip_user_agent=True)
        if "HTTP_HOST" in kwargs:
            kwargs["headers"]["Host"] = kwargs["HTTP_HOST"]
            del kwargs["HTTP_HOST"]
        return await self.http_client.fetch(self.get_url(path), method=method, **kwargs)

    async def client_get_async(self, path: str, **kwargs: Any) -> HTTPResponse:
        self.set_http_headers(kwargs, skip_user_agent=True)
        return await self.fetch_async("GET", path, **kwargs)

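    # Log in via Django's test client, then capture the resulting session
    # cookie so later Tornado requests are authenticated as that user.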
    def login_user(self, *args: Any, **kwargs: Any) -> None:
        super().login_user(*args, **kwargs)
        session_cookie = settings.SESSION_COOKIE_NAME
        session_key = self.client.session.session_key
        self.session_cookie = {
"Cookie": f"{session_cookie}={session_key}",
|
2017-05-25 11:50:13 +02:00
|
|
|
}
    def get_session_cookie(self) -> Dict[str, str]:
        return {} if self.session_cookie is None else self.session_cookie

    def add_session_cookie(self, kwargs: Dict[str, Any]) -> None:
        # TODO: Currently only allows session cookie
        headers = kwargs.get("headers", {})
        headers.update(self.get_session_cookie())
        kwargs["headers"] = headers

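    # Register a new event queue by requesting /json/events?dont_block=true and
    # return the allocated queue_id.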
    async def create_queue(self, **kwargs: Any) -> str:
        response = await self.tornado_client_get(
            "/json/events?dont_block=true",
            subdomain="zulip",
        )
        self.assertEqual(response.code, 200)
        body = orjson.loads(response.body)
        self.assertEqual(body["events"], [])
        self.assertIn("queue_id", body)
        return body["queue_id"]


class EventsTestCase(TornadoWebTestCase):
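    """End-to-end tests for the /json/events long-polling endpoint served by
    Tornado."""
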
    @async_to_sync_decorator
    async def test_create_queue(self) -> None:
        await in_django_thread(lambda: self.login_user(self.example_user("hamlet")))
        queue_id = await self.create_queue()
        self.assertIn(queue_id, event_queue.clients)

    @async_to_sync_decorator
    async def test_events_async(self) -> None:
        user_profile = await in_django_thread(lambda: self.example_user("hamlet"))
        await in_django_thread(lambda: self.login_user(user_profile))
        event_queue_id = await self.create_queue()
        data = {
            "queue_id": event_queue_id,
            "last_event_id": -1,
        }

        path = f"/json/events?{urllib.parse.urlencode(data)}"

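        # Publish a test event to this user's queue; scheduled on the IOLoop
        # below so it arrives while the long-polling request is waiting.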
        def process_events() -> None:
            users = [user_profile.id]
            event = dict(
                type="test",
                data="test data",
            )
            process_event(event, users)

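        # Wrap the real fetch_events so that, right after the handler registers
        # with the event queue, a callback is scheduled to publish the test
        # event; the long-poll then completes with that event.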
        def wrapped_fetch_events(**query: Any) -> Dict[str, Any]:
            ret = event_queue.fetch_events(**query)
            self.io_loop.add_callback(process_events)
            return ret

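        # Patch the Tornado view's fetch_events with the wrapper above, issue
        # the long-polling GET, and verify the test event is returned.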
        with mock.patch("zerver.tornado.views.fetch_events", side_effect=wrapped_fetch_events):
            response = await self.client_get_async(path)

        self.assertEqual(response.headers["Vary"], "Accept-Language, Cookie")
        data = orjson.loads(response.body)
        self.assertEqual(
            data["events"],
            [
                {"type": "test", "data": "test data", "id": 0},
            ],
        )
        self.assertEqual(data["result"], "success")