import time
from typing import Any, Callable, Dict, List, Mapping, Optional
from unittest import mock

import orjson
from django.conf import settings
from django.http import HttpRequest, HttpResponse
from django.utils.timezone import now as timezone_now

from version import API_FEATURE_LEVEL, ZULIP_VERSION
from zerver.lib.actions import (
    check_send_message,
    do_change_user_role,
    do_set_realm_property,
    do_update_user_presence,
)
from zerver.lib.event_schema import check_restart_event
from zerver.lib.events import fetch_initial_state_data, get_raw_user_data
from zerver.lib.exceptions import AccessDeniedError
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import HostRequestMock, queries_captured, stub_event_queue_user_events
from zerver.lib.users import get_api_key
from zerver.models import (
    Realm,
    UserMessage,
    UserPresence,
    UserProfile,
    flush_per_request_caches,
    get_client,
    get_realm,
    get_stream,
    get_system_bot,
)
from zerver.tornado.event_queue import (
    allocate_client_descriptor,
    clear_client_event_queues_for_testing,
    get_client_info_for_message_event,
    process_message_event,
    send_restart_events,
)
from zerver.tornado.views import get_events, get_events_backend
from zerver.views.events_register import (
    _default_all_public_streams,
    _default_narrow,
    events_register_backend,
)


class EventsEndpointTest(ZulipTestCase):
    def test_events_register_endpoint(self) -> None:

        # This test is intended to get minimal coverage on the
        # events_register code paths
        user = self.example_user("hamlet")
        with mock.patch("zerver.views.events_register.do_events_register", return_value={}):
            result = self.api_post(user, "/json/register")
        self.assert_json_success(result)

        with mock.patch("zerver.lib.events.request_event_queue", return_value=None):
            result = self.api_post(user, "/json/register")
        self.assert_json_error(result, "Could not allocate event queue")

        return_event_queue = "15:11"
        return_user_events: List[Dict[str, Any]] = []

        # We choose realm_emoji somewhat randomly--we want
        # a "boring" event type for the purpose of this test.
        event_type = "realm_emoji"
        test_event = dict(id=6, type=event_type, realm_emoji=[])

        # Test that call is made to deal with a returning soft deactivated user.
        with mock.patch("zerver.lib.events.reactivate_user_if_soft_deactivated") as fa:
            with stub_event_queue_user_events(return_event_queue, return_user_events):
                result = self.api_post(
                    user, "/json/register", dict(event_types=orjson.dumps([event_type]).decode())
                )
                self.assertEqual(fa.call_count, 1)

        with stub_event_queue_user_events(return_event_queue, return_user_events):
            result = self.api_post(
                user, "/json/register", dict(event_types=orjson.dumps([event_type]).decode())
            )

        self.assert_json_success(result)
        result_dict = result.json()
        self.assertEqual(result_dict["last_event_id"], -1)
        self.assertEqual(result_dict["queue_id"], "15:11")

        # Now start simulating returning actual data
        return_event_queue = "15:12"
        return_user_events = [test_event]

        with stub_event_queue_user_events(return_event_queue, return_user_events):
            result = self.api_post(
                user, "/json/register", dict(event_types=orjson.dumps([event_type]).decode())
            )

        self.assert_json_success(result)
        result_dict = result.json()
        self.assertEqual(result_dict["last_event_id"], 6)
        self.assertEqual(result_dict["queue_id"], "15:12")

        # sanity check the data relevant to our event
        self.assertEqual(result_dict["realm_emoji"], [])

        # Now test with `fetch_event_types` not matching the event
        return_event_queue = "15:13"
        with stub_event_queue_user_events(return_event_queue, return_user_events):
            result = self.api_post(
                user,
                "/json/register",
                dict(
                    event_types=orjson.dumps([event_type]).decode(),
                    fetch_event_types=orjson.dumps(["message"]).decode(),
                ),
            )
        self.assert_json_success(result)
        result_dict = result.json()
        self.assertEqual(result_dict["last_event_id"], 6)
        # Check that the message event types data is in there
        self.assertIn("max_message_id", result_dict)

        # Check that our original event type is not there.
        self.assertNotIn(event_type, result_dict)
        self.assertEqual(result_dict["queue_id"], "15:13")

        # Now test with `fetch_event_types` matching the event
        with stub_event_queue_user_events(return_event_queue, return_user_events):
            result = self.api_post(
                user,
                "/json/register",
                dict(
                    fetch_event_types=orjson.dumps([event_type]).decode(),
                    event_types=orjson.dumps(["message"]).decode(),
                ),
            )
        self.assert_json_success(result)
        result_dict = result.json()
        self.assertEqual(result_dict["last_event_id"], 6)
        # Check that we didn't fetch the messages data
        self.assertNotIn("max_message_id", result_dict)

        # Check that the realm_emoji data is in there.
        self.assertIn("realm_emoji", result_dict)
        self.assertEqual(result_dict["realm_emoji"], [])
        self.assertEqual(result_dict["queue_id"], "15:13")

    def test_events_register_endpoint_all_public_streams_access(self) -> None:
        guest_user = self.example_user("polonius")
        normal_user = self.example_user("hamlet")
        self.assertEqual(guest_user.role, UserProfile.ROLE_GUEST)
        self.assertEqual(normal_user.role, UserProfile.ROLE_MEMBER)

        with mock.patch("zerver.views.events_register.do_events_register", return_value={}):
            result = self.api_post(normal_user, "/json/register", dict(all_public_streams="true"))
        self.assert_json_success(result)

        with mock.patch("zerver.views.events_register.do_events_register", return_value={}):
            result = self.api_post(guest_user, "/json/register", dict(all_public_streams="true"))
        self.assert_json_error(result, "User not authorized for this query")

    def test_events_get_events_endpoint_guest_cant_use_all_public_streams_param(self) -> None:
        """
        This test is meant to execute the very beginning of the codepath
        to ensure guest users are immediately disallowed to use the
        all_public_streams param. Deeper testing is hard (and not necessary for this case)
        due to the codepath expecting AsyncDjangoHandler to be attached to the request,
        which doesn't happen in our test setup.
        """

        guest_user = self.example_user("polonius")
        self.assertEqual(guest_user.role, UserProfile.ROLE_GUEST)

        result = self.api_get(guest_user, "/api/v1/events", dict(all_public_streams="true"))
        self.assert_json_error(result, "User not authorized for this query")

    def test_tornado_endpoint(self) -> None:

        # This test is mostly intended to get minimal coverage on
        # the /notify_tornado endpoint, so we can have 100% URL coverage,
        # but it does exercise a little bit of the codepath.
        post_data = dict(
            data=orjson.dumps(
                dict(
                    event=dict(
                        type="other",
                    ),
                    users=[self.example_user("hamlet").id],
                ),
            ).decode(),
        )
        req = HostRequestMock(post_data, user_profile=None)
        req.META["REMOTE_ADDR"] = "127.0.0.1"
        with self.assertRaises(AccessDeniedError) as context:
            result = self.client_post_request("/notify_tornado", req)
        self.assertEqual(str(context.exception), "Access denied")
        self.assertEqual(context.exception.http_status_code, 403)
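
        # With the correct shared secret supplied, the request is accepted.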
        post_data["secret"] = settings.SHARED_SECRET
        req = HostRequestMock(post_data, user_profile=None)
        req.META["REMOTE_ADDR"] = "127.0.0.1"
        result = self.client_post_request("/notify_tornado", req)
        self.assert_json_success(result)


class GetEventsTest(ZulipTestCase):
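    # Helper to call a Tornado view function directly with a mocked request,
    # bypassing URL routing and middleware.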
    def tornado_call(
        self,
        view_func: Callable[[HttpRequest, UserProfile], HttpResponse],
        user_profile: UserProfile,
        post_data: Dict[str, Any],
    ) -> HttpResponse:
        request = HostRequestMock(post_data, user_profile)
        return view_func(request, user_profile)

    def test_get_events(self) -> None:
        user_profile = self.example_user("hamlet")
        email = user_profile.email
        recipient_user_profile = self.example_user("othello")
        recipient_email = recipient_user_profile.email
        self.login_user(user_profile)
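
        # Open an event queue for the sender and another for the recipient.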
        result = self.tornado_call(
            get_events,
            user_profile,
            {
                "apply_markdown": orjson.dumps(True).decode(),
                "client_gravatar": orjson.dumps(True).decode(),
                "event_types": orjson.dumps(["message"]).decode(),
                "user_client": "website",
                "dont_block": orjson.dumps(True).decode(),
            },
        )
        self.assert_json_success(result)
        queue_id = orjson.loads(result.content)["queue_id"]

        recipient_result = self.tornado_call(
            get_events,
            recipient_user_profile,
            {
                "apply_markdown": orjson.dumps(True).decode(),
                "client_gravatar": orjson.dumps(True).decode(),
                "event_types": orjson.dumps(["message"]).decode(),
                "user_client": "website",
                "dont_block": orjson.dumps(True).decode(),
            },
        )
        self.assert_json_success(recipient_result)
        recipient_queue_id = orjson.loads(recipient_result.content)["queue_id"]

        result = self.tornado_call(
            get_events,
            user_profile,
            {
                "queue_id": queue_id,
                "user_client": "website",
                "last_event_id": -1,
                "dont_block": orjson.dumps(True).decode(),
            },
        )
        events = orjson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 0)

        local_id = "10.01"
        check_send_message(
            sender=user_profile,
            client=get_client("whatever"),
            message_type_name="private",
            message_to=[recipient_email],
            topic_name=None,
            message_content="hello",
            local_id=local_id,
            sender_queue_id=queue_id,
        )

        result = self.tornado_call(
            get_events,
            user_profile,
            {
                "queue_id": queue_id,
                "user_client": "website",
                "last_event_id": -1,
                "dont_block": orjson.dumps(True).decode(),
            },
        )
        events = orjson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 1)
        self.assertEqual(events[0]["type"], "message")
        self.assertEqual(events[0]["message"]["sender_email"], email)
        self.assertEqual(events[0]["local_message_id"], local_id)
        self.assertEqual(events[0]["message"]["display_recipient"][0]["is_mirror_dummy"], False)
        self.assertEqual(events[0]["message"]["display_recipient"][1]["is_mirror_dummy"], False)

        last_event_id = events[0]["id"]
        local_id = "10.02"

        check_send_message(
            sender=user_profile,
            client=get_client("whatever"),
            message_type_name="private",
            message_to=[recipient_email],
            topic_name=None,
            message_content="hello",
            local_id=local_id,
            sender_queue_id=queue_id,
        )

        result = self.tornado_call(
            get_events,
            user_profile,
            {
                "queue_id": queue_id,
                "user_client": "website",
                "last_event_id": last_event_id,
                "dont_block": orjson.dumps(True).decode(),
            },
        )
        events = orjson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 1)
        self.assertEqual(events[0]["type"], "message")
        self.assertEqual(events[0]["message"]["sender_email"], email)
        self.assertEqual(events[0]["local_message_id"], local_id)

        # Test that the received message in the receiver's event queue
        # exists and does not contain a local id
        recipient_result = self.tornado_call(
            get_events,
            recipient_user_profile,
            {
                "queue_id": recipient_queue_id,
                "user_client": "website",
                "last_event_id": -1,
                "dont_block": orjson.dumps(True).decode(),
            },
        )
        recipient_events = orjson.loads(recipient_result.content)["events"]
        self.assert_json_success(recipient_result)
        self.assert_length(recipient_events, 2)
        self.assertEqual(recipient_events[0]["type"], "message")
        self.assertEqual(recipient_events[0]["message"]["sender_email"], email)
        self.assertTrue("local_message_id" not in recipient_events[0])
        self.assertEqual(recipient_events[1]["type"], "message")
        self.assertEqual(recipient_events[1]["message"]["sender_email"], email)
        self.assertTrue("local_message_id" not in recipient_events[1])

    def test_get_events_narrow(self) -> None:
        user_profile = self.example_user("hamlet")
        self.login_user(user_profile)

        def get_message(apply_markdown: bool, client_gravatar: bool) -> Dict[str, Any]:
            result = self.tornado_call(
                get_events,
                user_profile,
                dict(
                    apply_markdown=orjson.dumps(apply_markdown).decode(),
                    client_gravatar=orjson.dumps(client_gravatar).decode(),
                    event_types=orjson.dumps(["message"]).decode(),
                    narrow=orjson.dumps([["stream", "denmark"]]).decode(),
                    user_client="website",
                    dont_block=orjson.dumps(True).decode(),
                ),
            )

            self.assert_json_success(result)
            queue_id = orjson.loads(result.content)["queue_id"]

            result = self.tornado_call(
                get_events,
                user_profile,
                {
                    "queue_id": queue_id,
                    "user_client": "website",
                    "last_event_id": -1,
                    "dont_block": orjson.dumps(True).decode(),
                },
            )
            events = orjson.loads(result.content)["events"]
            self.assert_json_success(result)
            self.assert_length(events, 0)

            self.send_personal_message(user_profile, self.example_user("othello"), "hello")
            self.send_stream_message(user_profile, "Denmark", "**hello**")

            result = self.tornado_call(
                get_events,
                user_profile,
                {
                    "queue_id": queue_id,
                    "user_client": "website",
                    "last_event_id": -1,
                    "dont_block": orjson.dumps(True).decode(),
                },
            )
            events = orjson.loads(result.content)["events"]
            self.assert_json_success(result)
            self.assert_length(events, 1)
            self.assertEqual(events[0]["type"], "message")
            return events[0]["message"]

        message = get_message(apply_markdown=False, client_gravatar=False)
        self.assertEqual(message["display_recipient"], "Denmark")
        self.assertEqual(message["content"], "**hello**")
        self.assertTrue(message["avatar_url"].startswith("https://secure.gravatar.com"))

        message = get_message(apply_markdown=True, client_gravatar=False)
        self.assertEqual(message["display_recipient"], "Denmark")
        self.assertEqual(message["content"], "<p><strong>hello</strong></p>")
        self.assertIn("gravatar.com", message["avatar_url"])

        message = get_message(apply_markdown=False, client_gravatar=True)
        self.assertEqual(message["display_recipient"], "Denmark")
        self.assertEqual(message["content"], "**hello**")
        self.assertEqual(message["avatar_url"], None)

        message = get_message(apply_markdown=True, client_gravatar=True)
        self.assertEqual(message["display_recipient"], "Denmark")
        self.assertEqual(message["content"], "<p><strong>hello</strong></p>")
        self.assertEqual(message["avatar_url"], None)


class FetchInitialStateDataTest(ZulipTestCase):
    # Non-admin users don't have access to all bots
    def test_realm_bots_non_admin(self) -> None:
        user_profile = self.example_user("cordelia")
        self.assertFalse(user_profile.is_realm_admin)
        result = fetch_initial_state_data(user_profile)
        self.assert_length(result["realm_bots"], 0)

        # additionally the API key for a random bot is not present in the data
        api_key = get_api_key(self.notification_bot())
        self.assertNotIn(api_key, str(result))

    # Admin users have access to all bots in the realm_bots field
    def test_realm_bots_admin(self) -> None:
        user_profile = self.example_user("hamlet")
        do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
        self.assertTrue(user_profile.is_realm_admin)
        result = fetch_initial_state_data(user_profile)
        self.assertTrue(len(result["realm_bots"]) > 2)

    def test_max_message_id_with_no_history(self) -> None:
        user_profile = self.example_user("aaron")
        # Delete all historical messages for this user
        UserMessage.objects.filter(user_profile=user_profile).delete()
        result = fetch_initial_state_data(user_profile)
        self.assertEqual(result["max_message_id"], -1)

    def test_delivery_email_presence_for_non_admins(self) -> None:
        user_profile = self.example_user("aaron")
        self.assertFalse(user_profile.is_realm_admin)
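
        # With either email_address_visibility setting, the non-admin's
        # raw_users rows should not include delivery_email.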
        do_set_realm_property(
            user_profile.realm,
            "email_address_visibility",
            Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE,
            acting_user=None,
        )
        result = fetch_initial_state_data(user_profile)

        for key, value in result["raw_users"].items():
            self.assertNotIn("delivery_email", value)

        do_set_realm_property(
            user_profile.realm,
            "email_address_visibility",
            Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS,
            acting_user=None,
        )
        result = fetch_initial_state_data(user_profile)

        for key, value in result["raw_users"].items():
            self.assertNotIn("delivery_email", value)

    def test_delivery_email_presence_for_admins(self) -> None:
        user_profile = self.example_user("iago")
        self.assertTrue(user_profile.is_realm_admin)

        do_set_realm_property(
            user_profile.realm,
            "email_address_visibility",
            Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE,
            acting_user=None,
        )
        result = fetch_initial_state_data(user_profile)
        for key, value in result["raw_users"].items():
            self.assertNotIn("delivery_email", value)

        do_set_realm_property(
            user_profile.realm,
            "email_address_visibility",
            Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS,
            acting_user=None,
        )
        result = fetch_initial_state_data(user_profile)
        for key, value in result["raw_users"].items():
            self.assertIn("delivery_email", value)

    def test_user_avatar_url_field_optional(self) -> None:
        hamlet = self.example_user("hamlet")
        users = [
            self.example_user("iago"),
            self.example_user("cordelia"),
            self.example_user("ZOE"),
            self.example_user("othello"),
        ]
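
        # Mark these users as long term idle; their rows should then omit
        # avatar_url when user_avatar_url_field_optional is passed.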
        for user in users:
            user.long_term_idle = True
            user.save()

        long_term_idle_users_ids = [user.id for user in users]

        result = fetch_initial_state_data(
            user_profile=hamlet,
            user_avatar_url_field_optional=True,
        )

        raw_users = result["raw_users"]

        for user_dict in raw_users.values():
            if user_dict["user_id"] in long_term_idle_users_ids:
                self.assertFalse("avatar_url" in user_dict)
            else:
                self.assertIsNotNone(user_dict["avatar_url"])

        gravatar_users_id = [
            user_dict["user_id"]
            for user_dict in raw_users.values()
            if "avatar_url" in user_dict and "gravatar.com" in user_dict["avatar_url"]
        ]

        # Test again with client_gravatar = True
        result = fetch_initial_state_data(
            user_profile=hamlet,
            client_gravatar=True,
            user_avatar_url_field_optional=True,
        )

        raw_users = result["raw_users"]

        for user_dict in raw_users.values():
            if user_dict["user_id"] in gravatar_users_id:
                self.assertIsNone(user_dict["avatar_url"])
            else:
                self.assertFalse("avatar_url" in user_dict)


class ClientDescriptorsTest(ZulipTestCase):
    def test_get_client_info_for_all_public_streams(self) -> None:
        hamlet = self.example_user("hamlet")
        realm = hamlet.realm
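
        # Allocate an event queue that listens for message events on all
        # public streams.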
        queue_data = dict(
            all_public_streams=True,
            apply_markdown=True,
            client_gravatar=True,
            client_type_name="website",
            event_types=["message"],
            last_connection_time=time.time(),
            queue_timeout=0,
            realm_id=realm.id,
            user_profile_id=hamlet.id,
        )

        client = allocate_client_descriptor(queue_data)

        message_event = dict(
            realm_id=realm.id,
            stream_name="whatever",
        )

        client_info = get_client_info_for_message_event(
            message_event,
            users=[],
        )

        self.assert_length(client_info, 1)

        dct = client_info[client.event_queue.id]
        self.assertEqual(dct["client"].apply_markdown, True)
        self.assertEqual(dct["client"].client_gravatar, True)
        self.assertEqual(dct["client"].user_profile_id, hamlet.id)
        self.assertEqual(dct["flags"], [])
        self.assertEqual(dct["is_sender"], False)

        message_event = dict(
            realm_id=realm.id,
            stream_name="whatever",
            sender_queue_id=client.event_queue.id,
        )

        client_info = get_client_info_for_message_event(
            message_event,
            users=[],
        )
        dct = client_info[client.event_queue.id]
        self.assertEqual(dct["is_sender"], True)

    def test_get_client_info_for_normal_users(self) -> None:
        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        realm = hamlet.realm

        def test_get_info(apply_markdown: bool, client_gravatar: bool) -> None:
            clear_client_event_queues_for_testing()

            queue_data = dict(
                all_public_streams=False,
                apply_markdown=apply_markdown,
                client_gravatar=client_gravatar,
                client_type_name="website",
                event_types=["message"],
                last_connection_time=time.time(),
                queue_timeout=0,
                realm_id=realm.id,
                user_profile_id=hamlet.id,
            )

            client = allocate_client_descriptor(queue_data)
            message_event = dict(
                realm_id=realm.id,
                stream_name="whatever",
            )

            client_info = get_client_info_for_message_event(
                message_event,
                users=[
                    dict(id=cordelia.id),
                ],
            )

            self.assert_length(client_info, 0)

            client_info = get_client_info_for_message_event(
                message_event,
                users=[
                    dict(id=cordelia.id),
                    dict(id=hamlet.id, flags=["mentioned"]),
                ],
            )
            self.assert_length(client_info, 1)

            dct = client_info[client.event_queue.id]
            self.assertEqual(dct["client"].apply_markdown, apply_markdown)
            self.assertEqual(dct["client"].client_gravatar, client_gravatar)
            self.assertEqual(dct["client"].user_profile_id, hamlet.id)
            self.assertEqual(dct["flags"], ["mentioned"])
            self.assertEqual(dct["is_sender"], False)

        test_get_info(apply_markdown=False, client_gravatar=False)
        test_get_info(apply_markdown=True, client_gravatar=False)

        test_get_info(apply_markdown=False, client_gravatar=True)
        test_get_info(apply_markdown=True, client_gravatar=True)

    def test_process_message_event_with_mocked_client_info(self) -> None:
        hamlet = self.example_user("hamlet")

        class MockClient:
            def __init__(
                self, user_profile_id: int, apply_markdown: bool, client_gravatar: bool
            ) -> None:
                self.user_profile_id = user_profile_id
                self.apply_markdown = apply_markdown
                self.client_gravatar = client_gravatar
                self.client_type_name = "whatever"
                self.events: List[Dict[str, Any]] = []

            def accepts_messages(self) -> bool:
                return True

            def accepts_event(self, event: Dict[str, Any]) -> bool:
                assert event["type"] == "message"
                return True

            def add_event(self, event: Dict[str, Any]) -> None:
                self.events.append(event)
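
        # Four mock clients covering every combination of apply_markdown and
        # client_gravatar.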
        client1 = MockClient(
            user_profile_id=hamlet.id,
            apply_markdown=True,
            client_gravatar=False,
        )

        client2 = MockClient(
            user_profile_id=hamlet.id,
            apply_markdown=False,
            client_gravatar=False,
        )

        client3 = MockClient(
            user_profile_id=hamlet.id,
            apply_markdown=True,
            client_gravatar=True,
        )

        client4 = MockClient(
            user_profile_id=hamlet.id,
            apply_markdown=False,
            client_gravatar=True,
        )

        client_info = {
            "client:1": dict(
                client=client1,
                flags=["starred"],
            ),
            "client:2": dict(
                client=client2,
                flags=["has_alert_word"],
            ),
            "client:3": dict(
                client=client3,
                flags=[],
            ),
            "client:4": dict(
                client=client4,
                flags=[],
            ),
        }

        sender = hamlet

        message_event = dict(
            message_dict=dict(
                id=999,
                content="**hello**",
                rendered_content="<b>hello</b>",
                sender_id=sender.id,
                type="stream",
                client="website",
                # NOTE: Some of these fields are clutter, but some
                # will be useful when we let clients specify
                # that they can compute their own gravatar URLs.
                sender_email=sender.email,
                sender_delivery_email=sender.delivery_email,
                sender_realm_id=sender.realm_id,
                sender_avatar_source=UserProfile.AVATAR_FROM_GRAVATAR,
                sender_avatar_version=1,
                sender_is_mirror_dummy=None,
                recipient_type=None,
                recipient_type_id=None,
            ),
        )

        # Setting users to `[]` bypasses code we don't care about
        # for this test--we assume client_info is correct in our mocks,
        # and we are interested in how messages are put on event queue.
        users: List[Dict[str, Any]] = []

        with mock.patch(
            "zerver.tornado.event_queue.get_client_info_for_message_event", return_value=client_info
        ):
            process_message_event(message_event, users)

        # We are not closely examining avatar_url at this point, so
        # just sanity check them and then delete the keys so that
        # upcoming comparisons work.
        for client in [client1, client2]:
            message = client.events[0]["message"]
            self.assertIn("gravatar.com", message["avatar_url"])
            message.pop("avatar_url")

        self.assertEqual(
            client1.events,
            [
                dict(
                    type="message",
                    message=dict(
                        type="stream",
                        sender_id=sender.id,
                        sender_email=sender.email,
                        id=999,
                        content="<b>hello</b>",
                        content_type="text/html",
                        client="website",
                    ),
                    flags=["starred"],
                ),
            ],
        )

        self.assertEqual(
            client2.events,
            [
                dict(
                    type="message",
                    message=dict(
                        type="stream",
                        sender_id=sender.id,
                        sender_email=sender.email,
                        id=999,
                        content="**hello**",
                        content_type="text/x-markdown",
                        client="website",
                    ),
                    flags=["has_alert_word"],
                ),
            ],
        )

        self.assertEqual(
            client3.events,
            [
                dict(
                    type="message",
                    message=dict(
                        type="stream",
                        sender_id=sender.id,
                        sender_email=sender.email,
                        avatar_url=None,
                        id=999,
                        content="<b>hello</b>",
                        content_type="text/html",
                        client="website",
                    ),
                    flags=[],
                ),
            ],
        )

        self.assertEqual(
            client4.events,
            [
                dict(
                    type="message",
                    message=dict(
                        type="stream",
                        sender_id=sender.id,
                        sender_email=sender.email,
                        avatar_url=None,
                        id=999,
                        content="**hello**",
                        content_type="text/x-markdown",
                        client="website",
                    ),
                    flags=[],
                ),
            ],
        )


class RestartEventsTest(ZulipTestCase):
    def tornado_call(
        self,
        view_func: Callable[[HttpRequest, UserProfile], HttpResponse],
        user_profile: UserProfile,
        post_data: Dict[str, Any],
        client_name: Optional[str] = None,
    ) -> HttpResponse:
        request = HostRequestMock(post_data, user_profile, client_name=client_name)
        return view_func(request, user_profile)

    def test_restart(self) -> None:
        hamlet = self.example_user("hamlet")
        realm = hamlet.realm

        clear_client_event_queues_for_testing()

        queue_data = dict(
            all_public_streams=False,
            apply_markdown=True,
            client_gravatar=True,
            client_type_name="website",
            event_types=None,
            last_connection_time=time.time(),
            queue_timeout=0,
            realm_id=realm.id,
            user_profile_id=hamlet.id,
        )
        client = allocate_client_descriptor(queue_data)

        send_restart_events(immediate=True)

        # For now we only verify that a virtual event
        # gets added to the client's event_queue. We
        # may decide to write a deeper test in the future
        # that exercises the finish_handler.
        virtual_events = client.event_queue.virtual_events
        self.assert_length(virtual_events, 1)
        restart_event = virtual_events["restart"]

        check_restart_event("restart_event", restart_event)
        self.assertEqual(
            restart_event,
            dict(
                type="restart",
                zulip_version=ZULIP_VERSION,
                zulip_feature_level=API_FEATURE_LEVEL,
                server_generation=settings.SERVER_GENERATION,
                immediate=True,
                id=0,
            ),
        )

    def test_restart_event_recursive_call_logic(self) -> None:
        # This is a test for a subtle corner case; see the comments
        # around RestartEventError for details.
        hamlet = self.example_user("hamlet")
        realm = hamlet.realm

        # Setup an empty event queue
        clear_client_event_queues_for_testing()

        queue_data = dict(
            all_public_streams=False,
            apply_markdown=True,
            client_gravatar=True,
            client_type_name="website",
            event_types=None,
            last_connection_time=time.time(),
            queue_timeout=0,
            realm_id=realm.id,
            user_profile_id=hamlet.id,
        )
        client = allocate_client_descriptor(queue_data)

        # Add a restart event to it.
        send_restart_events(immediate=True)

        # Make a second queue after the restart events were sent.
        second_client = allocate_client_descriptor(queue_data)

        # Fetch the restart event just sent above, without removing it
        # from the queue. We will use this as a mock return value in
        # get_user_events.
        restart_event = orjson.loads(
            self.tornado_call(
                get_events_backend,
                hamlet,
                post_data={
                    "queue_id": client.event_queue.id,
                    "last_event_id": -1,
                    "dont_block": "true",
                    "user_profile_id": hamlet.id,
                    "secret": settings.SHARED_SECRET,
                    "client": "internal",
                },
                client_name="internal",
            ).content
        )["events"]

        # Now the tricky part: We call events_register_backend,
        # arranging it so that the first `get_user_events` call
        # returns our restart event (triggering the recursive
        # behavior), but the second (with a new queue) returns no
        # events.
        #
        # Because get_user_events always returns [] in tests, we need
        # to mock its return value as well; in an ideal world, we
        # would only need to mock client / second_client.
        with mock.patch(
            "zerver.lib.events.request_event_queue",
            side_effect=[client.event_queue.id, second_client.event_queue.id],
        ), mock.patch("zerver.lib.events.get_user_events", side_effect=[restart_event, []]):
            self.tornado_call(
                events_register_backend,
                hamlet,
                {
                    "queue_id": client.event_queue.id,
                    "user_client": "website",
                    "last_event_id": -1,
                    "dont_block": orjson.dumps(True).decode(),
                },
                client_name="website",
            )


class FetchQueriesTest(ZulipTestCase):
    def test_queries(self) -> None:
        user = self.example_user("hamlet")

        self.login_user(user)

        flush_per_request_caches()
        with queries_captured() as queries:
            with mock.patch("zerver.lib.events.always_want") as want_mock:
                fetch_initial_state_data(user)

        self.assert_length(queries, 33)
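
        # Expected number of database queries when fetching each event type
        # individually.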
        expected_counts = dict(
            alert_words=1,
            custom_profile_fields=1,
            default_streams=1,
            default_stream_groups=1,
            hotspots=0,
            message=1,
            muted_topics=1,
            muted_users=1,
            presence=1,
            realm=0,
            realm_bot=1,
            realm_domains=1,
            realm_embedded_bots=0,
            realm_incoming_webhook_bots=0,
            realm_emoji=1,
            realm_filters=1,
            realm_linkifiers=1,
            realm_playgrounds=1,
            realm_user=3,
            realm_user_groups=2,
            recent_private_conversations=1,
            starred_messages=1,
            stream=2,
            stop_words=0,
            subscription=4,
            update_display_settings=0,
            update_global_notifications=0,
            update_message_flags=5,
            user_status=1,
            video_calls=0,
            giphy=0,
        )

        wanted_event_types = {item[0][0] for item in want_mock.call_args_list}

        self.assertEqual(wanted_event_types, set(expected_counts))

        for event_type in sorted(wanted_event_types):
            count = expected_counts[event_type]
            flush_per_request_caches()
            with queries_captured() as queries:
                if event_type == "update_message_flags":
                    event_types = ["update_message_flags", "message"]
                else:
                    event_types = [event_type]

                fetch_initial_state_data(user, event_types=event_types)
            self.assert_length(queries, count)


class TestEventsRegisterAllPublicStreamsDefaults(ZulipTestCase):
    def setUp(self) -> None:
        super().setUp()
        self.user_profile = self.example_user("hamlet")
        self.email = self.user_profile.email

    def test_use_passed_all_public_true_default_false(self) -> None:
        self.user_profile.default_all_public_streams = False
        self.user_profile.save()
        result = _default_all_public_streams(self.user_profile, True)
        self.assertTrue(result)

    def test_use_passed_all_public_true_default(self) -> None:
        self.user_profile.default_all_public_streams = True
        self.user_profile.save()
        result = _default_all_public_streams(self.user_profile, True)
        self.assertTrue(result)

    def test_use_passed_all_public_false_default_false(self) -> None:
        self.user_profile.default_all_public_streams = False
        self.user_profile.save()
        result = _default_all_public_streams(self.user_profile, False)
        self.assertFalse(result)

    def test_use_passed_all_public_false_default_true(self) -> None:
        self.user_profile.default_all_public_streams = True
        self.user_profile.save()
        result = _default_all_public_streams(self.user_profile, False)
        self.assertFalse(result)

    def test_use_true_default_for_none(self) -> None:
        self.user_profile.default_all_public_streams = True
        self.user_profile.save()
        result = _default_all_public_streams(self.user_profile, None)
        self.assertTrue(result)

    def test_use_false_default_for_none(self) -> None:
        self.user_profile.default_all_public_streams = False
        self.user_profile.save()
        result = _default_all_public_streams(self.user_profile, None)
        self.assertFalse(result)


class TestEventsRegisterNarrowDefaults(ZulipTestCase):
    def setUp(self) -> None:
        super().setUp()
        self.user_profile = self.example_user("hamlet")
        self.email = self.user_profile.email
        self.stream = get_stream("Verona", self.user_profile.realm)

    def test_use_passed_narrow_no_default(self) -> None:
        self.user_profile.default_events_register_stream_id = None
        self.user_profile.save()
        result = _default_narrow(self.user_profile, [["stream", "my_stream"]])
        self.assertEqual(result, [["stream", "my_stream"]])

    def test_use_passed_narrow_with_default(self) -> None:
        self.user_profile.default_events_register_stream_id = self.stream.id
        self.user_profile.save()
        result = _default_narrow(self.user_profile, [["stream", "my_stream"]])
        self.assertEqual(result, [["stream", "my_stream"]])

    def test_use_default_if_narrow_is_empty(self) -> None:
        self.user_profile.default_events_register_stream_id = self.stream.id
        self.user_profile.save()
        result = _default_narrow(self.user_profile, [])
        self.assertEqual(result, [["stream", "Verona"]])

    def test_use_narrow_if_default_is_none(self) -> None:
        self.user_profile.default_events_register_stream_id = None
        self.user_profile.save()
        result = _default_narrow(self.user_profile, [])
        self.assertEqual(result, [])


class TestGetRawUserDataSystemBotRealm(ZulipTestCase):
    def test_get_raw_user_data_on_system_bot_realm(self) -> None:
        result = get_raw_user_data(
            get_realm("zulipinternal"),
            self.example_user("hamlet"),
            client_gravatar=True,
            user_avatar_url_field_optional=True,
        )

        for bot_email in settings.CROSS_REALM_BOT_EMAILS:
            bot_profile = get_system_bot(bot_email)
            self.assertTrue(bot_profile.id in result)
            self.assertTrue(result[bot_profile.id]["is_cross_realm_bot"])


class TestUserPresenceUpdatesDisabled(ZulipTestCase):
    def test_presence_events_disabled_on_larger_realm(self) -> None:
        # First check that normally a presence event gets sent.
        events: List[Mapping[str, Any]] = []
        with self.tornado_redirected_to_list(events, expected_num_events=1):
            do_update_user_presence(
                self.example_user("cordelia"),
                get_client("website"),
                timezone_now(),
                UserPresence.ACTIVE,
            )

        # Now check that if the realm has more than the USER_LIMIT_FOR_SENDING_PRESENCE_UPDATE_EVENTS
        # amount of active users, send_event doesn't get called.
        with self.tornado_redirected_to_list(events, expected_num_events=0):
            with self.settings(USER_LIMIT_FOR_SENDING_PRESENCE_UPDATE_EVENTS=1):
                do_update_user_presence(
                    self.example_user("hamlet"),
                    get_client("website"),
                    timezone_now(),
                    UserPresence.ACTIVE,
                )