2014-01-31 23:23:39 +01:00
|
|
|
# -*- coding: utf-8 -*-
|
2017-11-16 19:54:24 +01:00
|
|
|
# See https://zulip.readthedocs.io/en/latest/subsystems/events-system.html for
|
2017-02-12 01:59:28 +01:00
|
|
|
# high-level documentation on how this system works.
|
2018-05-11 01:39:38 +02:00
|
|
|
from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union
|
2017-05-22 23:36:12 +02:00
|
|
|
import os
|
|
|
|
import shutil
|
2017-10-09 16:20:14 +02:00
|
|
|
import sys
|
2016-06-03 08:00:04 +02:00
|
|
|
|
2016-11-17 16:52:28 +01:00
|
|
|
from django.conf import settings
|
2016-06-04 19:54:34 +02:00
|
|
|
from django.http import HttpRequest, HttpResponse
|
2014-01-31 23:23:39 +01:00
|
|
|
from django.test import TestCase
|
2017-04-15 04:03:56 +02:00
|
|
|
from django.utils.timezone import now as timezone_now
|
2018-05-04 22:57:36 +02:00
|
|
|
from io import StringIO
|
2014-01-31 23:23:39 +01:00
|
|
|
|
|
|
|
from zerver.models import (
|
2017-10-28 20:26:11 +02:00
|
|
|
get_client, get_realm, get_stream_recipient, get_stream, get_user,
|
2017-04-12 22:25:21 +02:00
|
|
|
Message, RealmDomain, Recipient, UserMessage, UserPresence, UserProfile,
|
2018-01-30 17:08:35 +01:00
|
|
|
Realm, Subscription, Stream, flush_per_request_caches, UserGroup, Service,
|
2017-12-14 22:22:17 +01:00
|
|
|
Attachment, PreregistrationUser,
|
2014-01-31 23:23:39 +01:00
|
|
|
)
|
|
|
|
|
|
|
|
from zerver.lib.actions import (
|
2018-03-31 07:30:24 +02:00
|
|
|
try_update_realm_custom_profile_field,
|
2017-03-24 05:49:23 +01:00
|
|
|
bulk_add_subscriptions,
|
2016-10-20 16:53:22 +02:00
|
|
|
bulk_remove_subscriptions,
|
2016-02-12 21:08:56 +01:00
|
|
|
check_add_realm_emoji,
|
2017-10-28 16:40:28 +02:00
|
|
|
check_send_message,
|
2017-03-18 03:50:41 +01:00
|
|
|
check_send_typing_notification,
|
2017-06-09 20:10:43 +02:00
|
|
|
do_add_alert_words,
|
|
|
|
do_add_default_stream,
|
2017-10-08 09:34:59 +02:00
|
|
|
do_add_reaction,
|
2017-10-16 22:02:20 +02:00
|
|
|
do_add_reaction_legacy,
|
2017-06-09 20:10:43 +02:00
|
|
|
do_add_realm_domain,
|
|
|
|
do_add_realm_filter,
|
2017-11-01 18:20:34 +01:00
|
|
|
do_add_streams_to_default_stream_group,
|
2018-02-12 10:53:36 +01:00
|
|
|
do_add_submessage,
|
2017-01-28 19:05:20 +01:00
|
|
|
do_change_avatar_fields,
|
2017-06-09 20:10:43 +02:00
|
|
|
do_change_bot_owner,
|
2014-03-06 16:34:54 +01:00
|
|
|
do_change_default_all_public_streams,
|
|
|
|
do_change_default_events_register_stream,
|
|
|
|
do_change_default_sending_stream,
|
2017-11-14 20:51:34 +01:00
|
|
|
do_change_default_stream_group_description,
|
2017-11-14 21:06:02 +01:00
|
|
|
do_change_default_stream_group_name,
|
2014-01-31 23:23:39 +01:00
|
|
|
do_change_full_name,
|
2017-06-09 20:10:43 +02:00
|
|
|
do_change_icon_source,
|
2014-01-31 23:23:39 +01:00
|
|
|
do_change_is_admin,
|
2017-06-09 20:10:43 +02:00
|
|
|
do_change_notification_settings,
|
|
|
|
do_change_realm_domain,
|
2014-01-31 23:23:39 +01:00
|
|
|
do_change_stream_description,
|
2016-07-01 07:26:09 +02:00
|
|
|
do_change_subscription_property,
|
2018-08-02 08:47:13 +02:00
|
|
|
do_change_user_delivery_email,
|
2014-03-06 16:34:54 +01:00
|
|
|
do_create_user,
|
2017-11-01 18:20:34 +01:00
|
|
|
do_create_default_stream_group,
|
2016-07-12 23:57:16 +02:00
|
|
|
do_deactivate_stream,
|
2014-03-06 16:34:54 +01:00
|
|
|
do_deactivate_user,
|
2017-06-09 20:10:43 +02:00
|
|
|
do_delete_message,
|
2017-12-14 22:22:17 +01:00
|
|
|
do_invite_users,
|
2017-01-24 01:48:35 +01:00
|
|
|
do_mark_hotspot_as_read,
|
2017-08-30 02:19:34 +02:00
|
|
|
do_mute_topic,
|
2017-02-15 21:06:07 +01:00
|
|
|
do_reactivate_user,
|
2014-03-06 16:34:54 +01:00
|
|
|
do_regenerate_api_key,
|
2014-01-31 23:23:39 +01:00
|
|
|
do_remove_alert_words,
|
2017-06-09 20:10:43 +02:00
|
|
|
do_remove_default_stream,
|
2017-11-01 18:20:34 +01:00
|
|
|
do_remove_default_stream_group,
|
2017-10-08 09:34:59 +02:00
|
|
|
do_remove_reaction,
|
2017-10-16 22:05:00 +02:00
|
|
|
do_remove_reaction_legacy,
|
2017-06-09 20:10:43 +02:00
|
|
|
do_remove_realm_domain,
|
2014-01-31 23:23:39 +01:00
|
|
|
do_remove_realm_emoji,
|
|
|
|
do_remove_realm_filter,
|
2017-11-01 18:20:34 +01:00
|
|
|
do_remove_streams_from_default_stream_group,
|
2014-01-31 23:23:39 +01:00
|
|
|
do_rename_stream,
|
2017-12-14 22:22:17 +01:00
|
|
|
do_revoke_user_invite,
|
2016-11-02 21:51:56 +01:00
|
|
|
do_set_realm_authentication_methods,
|
2017-03-21 18:08:40 +01:00
|
|
|
do_set_realm_message_editing,
|
2017-06-09 20:10:43 +02:00
|
|
|
do_set_realm_property,
|
|
|
|
do_set_user_display_setting,
|
2017-06-09 20:50:38 +02:00
|
|
|
do_set_realm_notifications_stream,
|
2017-10-20 16:55:04 +02:00
|
|
|
do_set_realm_signup_notifications_stream,
|
2017-08-30 02:19:34 +02:00
|
|
|
do_unmute_topic,
|
2017-03-24 05:54:20 +01:00
|
|
|
do_update_embedded_data,
|
2014-03-11 15:14:32 +01:00
|
|
|
do_update_message,
|
2017-03-24 03:19:23 +01:00
|
|
|
do_update_message_flags,
|
2018-01-16 20:34:12 +01:00
|
|
|
do_update_outgoing_webhook_service,
|
2014-02-02 15:14:57 +01:00
|
|
|
do_update_pointer,
|
2017-03-24 05:26:32 +01:00
|
|
|
do_update_user_presence,
|
2018-05-16 02:55:35 +02:00
|
|
|
get_typing_user_profiles,
|
2017-05-22 23:02:24 +02:00
|
|
|
log_event,
|
2017-11-14 20:33:09 +01:00
|
|
|
lookup_default_stream_groups,
|
2018-05-04 22:57:36 +02:00
|
|
|
notify_attachment_update,
|
2017-06-09 20:10:43 +02:00
|
|
|
notify_realm_custom_profile_fields,
|
2017-11-14 07:31:31 +01:00
|
|
|
check_add_user_group,
|
2017-11-14 08:00:18 +01:00
|
|
|
do_update_user_group_name,
|
2017-11-14 08:00:53 +01:00
|
|
|
do_update_user_group_description,
|
2017-11-14 08:01:39 +01:00
|
|
|
bulk_add_members_to_user_group,
|
2017-11-14 08:01:50 +01:00
|
|
|
remove_members_from_user_group,
|
2017-11-15 08:09:49 +01:00
|
|
|
check_delete_user_group,
|
2018-07-09 11:49:08 +02:00
|
|
|
do_update_user_custom_profile_data,
|
2016-11-30 10:42:58 +01:00
|
|
|
)
|
2017-02-10 23:04:46 +01:00
|
|
|
from zerver.lib.events import (
|
|
|
|
apply_events,
|
2014-01-31 23:23:39 +01:00
|
|
|
fetch_initial_state_data,
|
|
|
|
)
|
Simplify how we apply events for unread messages.
The logic to apply events to page_params['unread_msgs'] was
complicated due to the aggregated data structures that we pass
down to the client.
Now we defer the aggregation logic until after we apply the
events. This leads to some simplifications in that codepath,
as well as some performance enhancements.
The intermediate data structure has sets and dictionaries that
generally are keyed by message_id, so most message-related
updates are O(1) in nature.
Also, by waiting to compute the counts until the end, it's a
bit less messy to try to keep track of increments/decrements.
Instead, we just update the dictionaries and sets during the
event-apply phase.
This change also fixes some corner cases:
* We now respect mutes when updating counts.
* For message updates, instead of bluntly updating
the whole topic bucket, we update individual
message ids.
Unfortunately, this change doesn't seem to address the pesky
test that fails sporadically on Travis, related to mention
updates. It will change the symptom, slightly, though.
2017-10-05 00:34:19 +02:00
|
|
|
from zerver.lib.message import (
|
2017-11-10 15:14:13 +01:00
|
|
|
aggregate_unread_data,
|
|
|
|
get_raw_unread_data,
|
Simplify how we apply events for unread messages.
The logic to apply events to page_params['unread_msgs'] was
complicated due to the aggregated data structures that we pass
down to the client.
Now we defer the aggregation logic until after we apply the
events. This leads to some simplifications in that codepath,
as well as some performance enhancements.
The intermediate data structure has sets and dictionaries that
generally are keyed by message_id, so most message-related
updates are O(1) in nature.
Also, by waiting to compute the counts until the end, it's a
bit less messy to try to keep track of increments/decrements.
Instead, we just update the dictionaries and sets during the
event-apply phase.
This change also fixes some corner cases:
* We now respect mutes when updating counts.
* For message updates, instead of bluntly updating
the whole topic bucket, we update individual
message ids.
Unfortunately, this change doesn't seem to address the pesky
test that fails sporadically on Travis, related to mention
updates. It will change the symptom, slightly, though.
2017-10-05 00:34:19 +02:00
|
|
|
render_markdown,
|
|
|
|
UnreadMessagesResult,
|
|
|
|
)
|
2017-07-27 06:31:26 +02:00
|
|
|
from zerver.lib.test_helpers import POSTRequestMock, get_subscription, \
|
2018-03-11 18:55:20 +01:00
|
|
|
get_test_image_file, stub_event_queue_user_events, queries_captured
|
2016-11-10 19:30:09 +01:00
|
|
|
from zerver.lib.test_classes import (
|
|
|
|
ZulipTestCase,
|
|
|
|
)
|
2017-10-07 00:29:18 +02:00
|
|
|
from zerver.lib.test_runner import slow
|
2018-11-10 16:21:14 +01:00
|
|
|
from zerver.lib.topic import (
|
|
|
|
ORIG_TOPIC,
|
|
|
|
TOPIC_NAME,
|
|
|
|
TOPIC_LINKS,
|
|
|
|
)
|
2017-08-31 23:19:05 +02:00
|
|
|
from zerver.lib.topic_mutes import (
|
|
|
|
add_topic_mute,
|
|
|
|
)
|
2014-02-04 20:52:02 +01:00
|
|
|
from zerver.lib.validator import (
|
2017-03-26 08:13:01 +02:00
|
|
|
check_bool, check_dict, check_dict_only, check_float, check_int, check_list, check_string,
|
2018-01-16 20:34:12 +01:00
|
|
|
equals, check_none_or, Validator, check_url
|
2014-02-04 20:52:02 +01:00
|
|
|
)
|
2018-05-04 22:57:36 +02:00
|
|
|
from zerver.lib.upload import upload_backend, attachment_url_to_path_id
|
2018-08-01 10:53:40 +02:00
|
|
|
from zerver.lib.users import get_api_key
|
2014-02-04 20:52:02 +01:00
|
|
|
|
2016-10-12 05:25:20 +02:00
|
|
|
from zerver.views.events_register import _default_all_public_streams, _default_narrow
|
2014-02-07 01:22:19 +01:00
|
|
|
|
2017-10-12 01:37:44 +02:00
|
|
|
from zerver.tornado.event_queue import (
|
|
|
|
allocate_client_descriptor,
|
|
|
|
clear_client_event_queues_for_testing,
|
2017-10-26 22:10:52 +02:00
|
|
|
get_client_info_for_message_event,
|
|
|
|
process_message_event,
|
2017-10-12 01:37:44 +02:00
|
|
|
EventQueue,
|
|
|
|
)
|
2018-07-13 13:10:12 +02:00
|
|
|
from zerver.tornado.views import get_events
|
2014-01-31 23:23:39 +01:00
|
|
|
|
|
|
|
from collections import OrderedDict
|
2016-11-17 16:52:28 +01:00
|
|
|
import mock
|
2014-01-28 18:11:08 +01:00
|
|
|
import time
|
2014-01-31 23:23:39 +01:00
|
|
|
import ujson
|
|
|
|
|
2017-05-22 23:02:24 +02:00
|
|
|
|
|
|
|
class LogEventsTest(ZulipTestCase):
    """Tests for the log_event() helper and its EVENT_LOG_DIR handling."""

    def test_with_missing_event_log_dir_setting(self) -> None:
        # With no log directory configured, log_event() should be a no-op
        # rather than crashing.
        with self.settings(EVENT_LOG_DIR=None):
            log_event(dict())

    def test_log_event_mkdir(self) -> None:
        log_dir = 'var/test-log-dir'

        # Start from a clean slate; the directory may not exist yet.
        try:
            shutil.rmtree(log_dir)
        except OSError:  # nocoverage
            # assume it doesn't exist already
            pass

        self.assertFalse(os.path.exists(log_dir))
        with self.settings(EVENT_LOG_DIR=log_dir):
            event = {}  # type: Dict[str, int]
            log_event(event)
        # log_event() should have created the directory on demand.
        self.assertTrue(os.path.exists(log_dir))
|
2017-05-22 23:02:24 +02:00
|
|
|
|
2017-03-06 17:46:32 +01:00
|
|
|
class EventsEndpointTest(ZulipTestCase):
    """Smoke tests for the /json/register and /notify_tornado endpoints."""

    def test_events_register_endpoint(self) -> None:

        # This test is intended to get minimal coverage on the
        # events_register code paths
        email = self.example_email("hamlet")
        with mock.patch('zerver.views.events_register.do_events_register', return_value={}):
            result = self.api_post(email, '/json/register')
        self.assert_json_success(result)

        # When no event queue can be allocated, the endpoint should
        # report the failure to the client.
        with mock.patch('zerver.lib.events.request_event_queue', return_value=None):
            result = self.api_post(email, '/json/register')
        self.assert_json_error(result, "Could not allocate event queue")

        return_event_queue = '15:11'
        return_user_events = [] # type: (List[Any])

        # Test that call is made to deal with a returning soft deactivated user.
        with mock.patch('zerver.lib.events.maybe_catch_up_soft_deactivated_user') as fa:
            with stub_event_queue_user_events(return_event_queue, return_user_events):
                result = self.api_post(email, '/json/register', dict(event_types=ujson.dumps(['pointer'])))
                self.assertEqual(fa.call_count, 1)

        # With an empty event list, last_event_id should stay at the
        # initial value of -1.
        with stub_event_queue_user_events(return_event_queue, return_user_events):
            result = self.api_post(email, '/json/register', dict(event_types=ujson.dumps(['pointer'])))
        self.assert_json_success(result)
        result_dict = result.json()
        self.assertEqual(result_dict['last_event_id'], -1)
        self.assertEqual(result_dict['queue_id'], '15:11')

        # A queued pointer event should be applied to the initial state
        # and advance last_event_id.
        return_event_queue = '15:12'
        return_user_events = [
            {
                'id': 6,
                'type': 'pointer',
                'pointer': 15,
            }
        ]
        with stub_event_queue_user_events(return_event_queue, return_user_events):
            result = self.api_post(email, '/json/register', dict(event_types=ujson.dumps(['pointer'])))

        self.assert_json_success(result)
        result_dict = result.json()
        self.assertEqual(result_dict['last_event_id'], 6)
        self.assertEqual(result_dict['pointer'], 15)
        self.assertEqual(result_dict['queue_id'], '15:12')

        # Now test with `fetch_event_types` not matching the event
        return_event_queue = '15:13'
        with stub_event_queue_user_events(return_event_queue, return_user_events):
            result = self.api_post(email, '/json/register',
                                   dict(event_types=ujson.dumps(['pointer']),
                                        fetch_event_types=ujson.dumps(['message'])))
        self.assert_json_success(result)
        result_dict = result.json()
        self.assertEqual(result_dict['last_event_id'], 6)
        # Check that the message event types data is in there
        self.assertIn('max_message_id', result_dict)
        # Check that the pointer event types data is not in there
        self.assertNotIn('pointer', result_dict)
        self.assertEqual(result_dict['queue_id'], '15:13')

        # Now test with `fetch_event_types` matching the event
        with stub_event_queue_user_events(return_event_queue, return_user_events):
            result = self.api_post(email, '/json/register',
                                   dict(fetch_event_types=ujson.dumps(['pointer']),
                                        event_types=ujson.dumps(['message'])))
        self.assert_json_success(result)
        result_dict = result.json()
        self.assertEqual(result_dict['last_event_id'], 6)
        # Check that we didn't fetch the messages data
        self.assertNotIn('max_message_id', result_dict)
        # Check that the pointer data is in there, and is correctly
        # updated (preserving our atomicity guarantee), though of
        # course any future pointer events won't be distributed
        self.assertIn('pointer', result_dict)
        self.assertEqual(result_dict['pointer'], 15)
        self.assertEqual(result_dict['queue_id'], '15:13')

    def test_tornado_endpoint(self) -> None:

        # This test is mostly intended to get minimal coverage on
        # the /notify_tornado endpoint, so we can have 100% URL coverage,
        # but it does exercise a little bit of the codepath.
        post_data = dict(
            data=ujson.dumps(
                dict(
                    event=dict(
                        type='other'
                    ),
                    users=[self.example_user('hamlet').id],
                ),
            ),
        )
        req = POSTRequestMock(post_data, user_profile=None)
        req.META['REMOTE_ADDR'] = '127.0.0.1'
        # Without the shared secret, the request must be rejected.
        result = self.client_post_request('/notify_tornado', req)
        self.assert_json_error(result, 'Access denied', status_code=403)

        # With the shared secret supplied, the same payload is accepted.
        post_data['secret'] = settings.SHARED_SECRET
        req = POSTRequestMock(post_data, user_profile=None)
        req.META['REMOTE_ADDR'] = '127.0.0.1'
        result = self.client_post_request('/notify_tornado', req)
        self.assert_json_success(result)
2014-01-31 23:23:39 +01:00
|
|
|
|
2016-08-23 02:08:42 +02:00
|
|
|
class GetEventsTest(ZulipTestCase):
    """End-to-end tests for the Tornado get_events view, covering event
    delivery to sender and recipient queues and narrow filtering."""

    def tornado_call(self, view_func: Callable[[HttpRequest, UserProfile], HttpResponse],
                     user_profile: UserProfile,
                     post_data: Dict[str, Any]) -> HttpResponse:
        # Invoke a Tornado view directly with a mocked POST request,
        # bypassing the HTTP stack.
        request = POSTRequestMock(post_data, user_profile)
        return view_func(request, user_profile)

    def test_get_events(self) -> None:
        user_profile = self.example_user('hamlet')
        email = user_profile.email
        recipient_user_profile = self.example_user('othello')
        recipient_email = recipient_user_profile.email
        self.login(email)

        # Register an event queue for the sender.
        result = self.tornado_call(get_events, user_profile,
                                   {"apply_markdown": ujson.dumps(True),
                                    "client_gravatar": ujson.dumps(True),
                                    "event_types": ujson.dumps(["message"]),
                                    "user_client": "website",
                                    "dont_block": ujson.dumps(True),
                                    })
        self.assert_json_success(result)
        queue_id = ujson.loads(result.content)["queue_id"]

        # Register a second event queue for the recipient.
        recipient_result = self.tornado_call(get_events, recipient_user_profile,
                                             {"apply_markdown": ujson.dumps(True),
                                              "client_gravatar": ujson.dumps(True),
                                              "event_types": ujson.dumps(["message"]),
                                              "user_client": "website",
                                              "dont_block": ujson.dumps(True),
                                              })
        self.assert_json_success(recipient_result)
        recipient_queue_id = ujson.loads(recipient_result.content)["queue_id"]

        # The freshly created queue should start out empty.
        result = self.tornado_call(get_events, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": -1,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 0)

        # Send a PM from the sender's own queue, passing a local_id so
        # the sender's event echoes it back.
        local_id = '10.01'
        check_send_message(
            sender=user_profile,
            client=get_client('whatever'),
            message_type_name='private',
            message_to=[recipient_email],
            topic_name=None,
            message_content='hello',
            local_id=local_id,
            sender_queue_id=queue_id,
        )

        result = self.tornado_call(get_events, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": -1,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 1)
        self.assertEqual(events[0]["type"], "message")
        self.assertEqual(events[0]["message"]["sender_email"], email)
        # The sender's event should carry the local_id it supplied.
        self.assertEqual(events[0]["local_message_id"], local_id)
        self.assertEqual(events[0]["message"]["display_recipient"][0]["is_mirror_dummy"], False)
        self.assertEqual(events[0]["message"]["display_recipient"][1]["is_mirror_dummy"], False)

        # Send a second message; polling with last_event_id should only
        # return events newer than that id.
        last_event_id = events[0]["id"]
        local_id = '10.02'

        check_send_message(
            sender=user_profile,
            client=get_client('whatever'),
            message_type_name='private',
            message_to=[recipient_email],
            topic_name=None,
            message_content='hello',
            local_id=local_id,
            sender_queue_id=queue_id,
        )

        result = self.tornado_call(get_events, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": last_event_id,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 1)
        self.assertEqual(events[0]["type"], "message")
        self.assertEqual(events[0]["message"]["sender_email"], email)
        self.assertEqual(events[0]["local_message_id"], local_id)

        # Test that the received message in the receiver's event queue
        # exists and does not contain a local id
        recipient_result = self.tornado_call(get_events, recipient_user_profile,
                                             {"queue_id": recipient_queue_id,
                                              "user_client": "website",
                                              "last_event_id": -1,
                                              "dont_block": ujson.dumps(True),
                                              })
        recipient_events = ujson.loads(recipient_result.content)["events"]
        self.assert_json_success(recipient_result)
        self.assertEqual(len(recipient_events), 2)
        self.assertEqual(recipient_events[0]["type"], "message")
        self.assertEqual(recipient_events[0]["message"]["sender_email"], email)
        self.assertTrue("local_message_id" not in recipient_events[0])
        self.assertEqual(recipient_events[1]["type"], "message")
        self.assertEqual(recipient_events[1]["message"]["sender_email"], email)
        self.assertTrue("local_message_id" not in recipient_events[1])

    def test_get_events_narrow(self) -> None:
        user_profile = self.example_user('hamlet')
        email = user_profile.email
        self.login(email)

        def get_message(apply_markdown: bool, client_gravatar: bool) -> Dict[str, Any]:
            # Register a queue narrowed to stream "denmark", send one PM
            # (filtered out) and one stream message, and return the single
            # message dict that makes it through the narrow.
            result = self.tornado_call(
                get_events,
                user_profile,
                dict(
                    apply_markdown=ujson.dumps(apply_markdown),
                    client_gravatar=ujson.dumps(client_gravatar),
                    event_types=ujson.dumps(["message"]),
                    narrow=ujson.dumps([["stream", "denmark"]]),
                    user_client="website",
                    dont_block=ujson.dumps(True),
                )
            )

            self.assert_json_success(result)
            queue_id = ujson.loads(result.content)["queue_id"]

            result = self.tornado_call(get_events, user_profile,
                                       {"queue_id": queue_id,
                                        "user_client": "website",
                                        "last_event_id": -1,
                                        "dont_block": ujson.dumps(True),
                                        })
            events = ujson.loads(result.content)["events"]
            self.assert_json_success(result)
            self.assert_length(events, 0)

            # The PM should be excluded by the narrow; only the stream
            # message should be delivered.
            self.send_personal_message(email, self.example_email("othello"), "hello")
            self.send_stream_message(email, "Denmark", "**hello**")

            result = self.tornado_call(get_events, user_profile,
                                       {"queue_id": queue_id,
                                        "user_client": "website",
                                        "last_event_id": -1,
                                        "dont_block": ujson.dumps(True),
                                        })
            events = ujson.loads(result.content)["events"]
            self.assert_json_success(result)
            self.assert_length(events, 1)
            self.assertEqual(events[0]["type"], "message")
            return events[0]['message']

        # Exercise all four combinations of apply_markdown/client_gravatar.
        message = get_message(apply_markdown=False, client_gravatar=False)
        self.assertEqual(message["display_recipient"], "Denmark")
        self.assertEqual(message["content"], "**hello**")
        self.assertIn('gravatar.com', message["avatar_url"])

        message = get_message(apply_markdown=True, client_gravatar=False)
        self.assertEqual(message["display_recipient"], "Denmark")
        self.assertEqual(message["content"], "<p><strong>hello</strong></p>")
        self.assertIn('gravatar.com', message["avatar_url"])

        # With client_gravatar, the server omits the avatar URL and the
        # client is expected to compute it.
        message = get_message(apply_markdown=False, client_gravatar=True)
        self.assertEqual(message["display_recipient"], "Denmark")
        self.assertEqual(message["content"], "**hello**")
        self.assertEqual(message["avatar_url"], None)

        message = get_message(apply_markdown=True, client_gravatar=True)
        self.assertEqual(message["display_recipient"], "Denmark")
        self.assertEqual(message["content"], "<p><strong>hello</strong></p>")
        self.assertEqual(message["avatar_url"], None)
|
|
|
2016-08-23 02:08:42 +02:00
|
|
|
class EventsRegisterTest(ZulipTestCase):
|
2017-11-05 10:51:25 +01:00
|
|
|
def setUp(self) -> None:
    # All register/apply_events tests in this class run as Hamlet.
    super().setUp()
    self.user_profile = self.example_user('hamlet')
|
|
|
|
2018-01-30 17:08:35 +01:00
|
|
|
def create_bot(self, email: str, **extras: Any) -> Optional[UserProfile]:
    # Thin wrapper: create a test bot owned by self.user_profile.
    return self.create_test_bot(email, self.user_profile, **extras)
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def realm_bot_schema(self, field_name: str, check: Validator) -> Validator:
    """Return a validator for a realm_bot/update event whose 'bot'
    payload contains exactly email, user_id, and the given field."""
    return self.check_events_dict([
        ('type', equals('realm_bot')),
        ('op', equals('update')),
        ('bot', check_dict_only([
            ('email', check_string),
            ('user_id', check_int),
            (field_name, check),
        ])),
    ])
|
|
|
|
2017-11-17 10:47:43 +01:00
|
|
|
def do_test(self, action: Callable[[], Any], event_types: Optional[List[str]]=None,
            include_subscribers: bool=True, state_change_expected: bool=True,
            client_gravatar: bool=False, num_events: int=1) -> List[Dict[str, Any]]:
    '''
    Run `action`, capture the events it generates, and verify that
    applying those events to a previously fetched state produces the
    same result as fetching the state fresh after the action.

    Make sure we have a clean slate of client descriptors for these tests.
    If we don't do this, then certain failures will only manifest when you
    run multiple tests within a single test function.
    '''
    clear_client_event_queues_for_testing()

    # Allocate an event queue for the test user so that the events
    # triggered by `action` get captured.
    client = allocate_client_descriptor(
        dict(user_profile_id = self.user_profile.id,
             user_profile_email = self.user_profile.email,
             realm_id = self.user_profile.realm_id,
             event_types = event_types,
             client_type_name = "website",
             apply_markdown = True,
             client_gravatar = client_gravatar,
             all_public_streams = False,
             queue_timeout = 600,
             last_connection_time = time.time(),
             narrow = [])
    )
    # hybrid_state = initial fetch state + re-applying events triggered by our action
    # normal_state = do action then fetch at the end (the "normal" code path)
    hybrid_state = fetch_initial_state_data(
        self.user_profile, event_types, "",
        client_gravatar=True,
        include_subscribers=include_subscribers
    )
    action()
    events = client.event_queue.contents()
    self.assertEqual(len(events), num_events)

    # Serialize before/after so we can detect whether apply_events
    # actually changed anything.
    before = ujson.dumps(hybrid_state)
    apply_events(hybrid_state, events, self.user_profile,
                 client_gravatar=True, include_subscribers=include_subscribers)
    after = ujson.dumps(hybrid_state)

    if state_change_expected:
        if before == after:
            print(events) # nocoverage
            raise AssertionError('Test does not exercise enough code -- events do not change state.')
    else:
        if before != after:
            raise AssertionError('Test is invalid--state actually does change here.')

    normal_state = fetch_initial_state_data(
        self.user_profile, event_types, "",
        client_gravatar=True,
        include_subscribers=include_subscribers
    )
    self.match_states(hybrid_state, normal_state, events)
    return events
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def assert_on_error(self, error: Optional[str]) -> None:
    """Fail the test with `error` as the message, if one was reported."""
    if not error:
        return
    raise AssertionError(error)
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def match_states(self, state1: Dict[str, Any], state2: Dict[str, Any],
                 events: List[Dict[str, Any]]) -> None:
    """Assert that two state dicts are equivalent after normalizing
    ordering, printing a developer-friendly diff on mismatch."""
    def normalize(state: Dict[str, Any]) -> None:
        # Subscriber lists have no guaranteed order; sort before comparing.
        for u in state['never_subscribed']:
            if 'subscribers' in u:
                u['subscribers'].sort()
        for u in state['subscriptions']:
            if 'subscribers' in u:
                u['subscribers'].sort()
        # Re-key list-valued sections by a stable identifier so list
        # ordering differences don't cause false mismatches.
        state['subscriptions'] = {u['name']: u for u in state['subscriptions']}
        state['unsubscribed'] = {u['name']: u for u in state['unsubscribed']}
        if 'realm_bots' in state:
            state['realm_bots'] = {u['email']: u for u in state['realm_bots']}
    normalize(state1)
    normalize(state2)

    # If this assertion fails, we have unusual problems.
    self.assertEqual(state1.keys(), state2.keys())

    # The far more likely scenario is that some section of
    # our enormous payload does not get updated properly. We
    # want the diff here to be developer-friendly, hence
    # the somewhat tedious code to provide useful output.
    if state1 != state2: # nocoverage
        print('\n---States DO NOT MATCH---')
        print('\nEVENTS:\n')

        # Printing out the events is a big help to
        # developers.
        import json
        for event in events:
            print(json.dumps(event, indent=4))

        print('\nMISMATCHES:\n')
        for k in state1:
            if state1[k] != state2[k]:
                print('\nkey = ' + k)
                try:
                    self.assertEqual({k: state1[k]}, {k: state2[k]})
                except AssertionError as e:
                    print(e)
        print('''
NOTE:

This is an advanced test that verifies how
we apply events after fetching data. If you
do not know how to debug it, you can ask for
help on chat.
''')

        sys.stdout.flush()
        raise AssertionError('Mismatching states')
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def check_events_dict(self, required_keys: List[Tuple[str, Validator]]) -> Validator:
|
2017-04-20 17:31:41 +02:00
|
|
|
required_keys.append(('id', check_int))
|
2018-07-25 05:54:29 +02:00
|
|
|
# Raise AssertionError if `required_keys` contains duplicate items.
|
|
|
|
keys = [key[0] for key in required_keys]
|
|
|
|
self.assertEqual(len(keys), len(set(keys)), 'Duplicate items found in required_keys.')
|
2017-04-20 17:31:41 +02:00
|
|
|
return check_dict_only(required_keys)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_mentioned_send_message_events(self) -> None:
|
2017-07-21 20:31:25 +02:00
|
|
|
user = self.example_user('hamlet')
|
|
|
|
|
2017-08-10 10:58:39 +02:00
|
|
|
for i in range(3):
|
|
|
|
content = 'mentioning... @**' + user.full_name + '** hello ' + str(i)
|
|
|
|
self.do_test(
|
2017-10-28 16:40:28 +02:00
|
|
|
lambda: self.send_stream_message(self.example_email('cordelia'),
|
|
|
|
"Verona",
|
|
|
|
content)
|
2017-08-10 10:58:39 +02:00
|
|
|
|
|
|
|
)
|
2017-07-21 20:31:25 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_pm_send_message_events(self) -> None:
|
2017-05-23 03:02:01 +02:00
|
|
|
self.do_test(
|
2017-10-28 16:40:28 +02:00
|
|
|
lambda: self.send_personal_message(self.example_email('cordelia'),
|
|
|
|
self.example_email('hamlet'),
|
|
|
|
'hola')
|
2017-05-23 03:02:01 +02:00
|
|
|
|
|
|
|
)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_huddle_send_message_events(self) -> None:
|
2017-05-23 03:02:01 +02:00
|
|
|
huddle = [
|
|
|
|
self.example_email('hamlet'),
|
|
|
|
self.example_email('othello'),
|
|
|
|
]
|
|
|
|
self.do_test(
|
2017-10-28 16:40:28 +02:00
|
|
|
lambda: self.send_huddle_message(self.example_email('cordelia'),
|
|
|
|
huddle,
|
|
|
|
'hola')
|
2017-05-23 03:02:01 +02:00
|
|
|
|
|
|
|
)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_stream_send_message_events(self) -> None:
        """Validate the `message` event schema for stream sends (with and
        without client_gravatar), then the `update_message` schemas for a
        full edit and for do_update_embedded_data.
        """
        def check_none(var_name: str, val: Any) -> Optional[str]:
            # Validator used when client_gravatar=True: the server is
            # expected to send avatar_url as None in that mode.
            assert(val is None)
            return None

        def get_checker(check_gravatar: Validator) -> Validator:
            # The avatar_url validator is the only part of the message
            # schema that differs between the two client_gravatar modes.
            schema_checker = self.check_events_dict([
                ('type', equals('message')),
                ('flags', check_list(None)),
                ('message', self.check_events_dict([
                    ('avatar_url', check_gravatar),
                    ('client', check_string),
                    ('content', check_string),
                    ('content_type', equals('text/html')),
                    ('display_recipient', check_string),
                    ('is_me_message', check_bool),
                    ('reactions', check_list(None)),
                    ('recipient_id', check_int),
                    ('sender_realm_str', check_string),
                    ('sender_email', check_string),
                    ('sender_full_name', check_string),
                    ('sender_id', check_int),
                    ('sender_short_name', check_string),
                    ('stream_id', check_int),
                    (TOPIC_NAME, check_string),
                    (TOPIC_LINKS, check_list(None)),
                    ('submessages', check_list(None)),
                    ('timestamp', check_int),
                    ('type', check_string),
                ])),
            ])
            return schema_checker

        # With client_gravatar=False, avatar_url is a string.
        events = self.do_test(
            lambda: self.send_stream_message(self.example_email("hamlet"), "Verona", "hello"),
            client_gravatar=False,
        )
        schema_checker = get_checker(check_gravatar=check_string)
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

        # With client_gravatar=True, avatar_url must be None.
        events = self.do_test(
            lambda: self.send_stream_message(self.example_email("hamlet"), "Verona", "hello"),
            client_gravatar=True,
        )
        schema_checker = get_checker(check_gravatar=check_none)
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

        # Verify message editing
        schema_checker = self.check_events_dict([
            ('type', equals('update_message')),
            ('flags', check_list(None)),
            ('content', check_string),
            ('edit_timestamp', check_int),
            ('message_id', check_int),
            ('message_ids', check_list(check_int)),
            ('prior_mention_user_ids', check_list(check_int)),
            ('mention_user_ids', check_list(check_int)),
            ('presence_idle_user_ids', check_list(check_int)),
            ('stream_push_user_ids', check_list(check_int)),
            ('stream_email_user_ids', check_list(check_int)),
            ('push_notify_user_ids', check_list(check_int)),
            ('orig_content', check_string),
            ('orig_rendered_content', check_string),
            (ORIG_TOPIC, check_string),
            ('prev_rendered_content_version', check_int),
            ('propagate_mode', check_string),
            ('rendered_content', check_string),
            ('sender', check_string),
            ('stream_id', check_int),
            ('stream_name', check_string),
            (TOPIC_NAME, check_string),
            (TOPIC_LINKS, check_list(None)),
            ('user_id', check_int),
            ('is_me_message', check_bool),
        ])

        # Edit the most recently sent message (the one from the send above).
        message = Message.objects.order_by('-id')[0]
        topic = 'new_topic'
        propagate_mode = 'change_all'
        content = 'new content'
        rendered_content = render_markdown(message, content)
        prior_mention_user_ids = set()  # type: Set[int]
        mentioned_user_ids = set()  # type: Set[int]

        events = self.do_test(
            lambda: do_update_message(self.user_profile, message, topic,
                                      propagate_mode, content, rendered_content,
                                      prior_mention_user_ids,
                                      mentioned_user_ids),
            state_change_expected=True,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

        # Verify do_update_embedded_data
        schema_checker = self.check_events_dict([
            ('type', equals('update_message')),
            ('flags', check_list(None)),
            ('content', check_string),
            ('message_id', check_int),
            ('message_ids', check_list(check_int)),
            ('rendered_content', check_string),
            ('sender', check_string),
        ])

        # Embedded-data updates do not change the fetched state.
        events = self.do_test(
            lambda: do_update_embedded_data(self.user_profile, message,
                                            u"embed_content", "<p>embed_content</p>"),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_update_message_flags(self) -> None:
        """Validate update_message_flags events for both the 'add' and
        'remove' operations on the 'starred' flag.
        """
        # Test message flag update events
        schema_checker = self.check_events_dict([
            ('all', check_bool),
            ('type', equals('update_message_flags')),
            ('flag', check_string),
            ('messages', check_list(check_int)),
            ('operation', equals("add")),
        ])

        message = self.send_personal_message(
            self.example_email("cordelia"),
            self.example_email("hamlet"),
            "hello",
        )
        user_profile = self.example_user('hamlet')
        events = self.do_test(
            lambda: do_update_message_flags(user_profile, get_client("website"), 'add', 'starred', [message]),
            state_change_expected=True,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
        # Same schema except the operation is 'remove'.
        schema_checker = self.check_events_dict([
            ('all', check_bool),
            ('type', equals('update_message_flags')),
            ('flag', check_string),
            ('messages', check_list(check_int)),
            ('operation', equals("remove")),
        ])
        events = self.do_test(
            lambda: do_update_message_flags(user_profile, get_client("website"), 'remove', 'starred', [message]),
            state_change_expected=True,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_update_read_flag_removes_unread_msg_ids(self) -> None:
|
2017-05-23 03:02:01 +02:00
|
|
|
|
|
|
|
user_profile = self.example_user('hamlet')
|
2017-07-21 20:31:25 +02:00
|
|
|
mention = '@**' + user_profile.full_name + '**'
|
|
|
|
|
|
|
|
for content in ['hello', mention]:
|
2017-10-28 16:40:28 +02:00
|
|
|
message = self.send_stream_message(
|
2017-07-21 20:31:25 +02:00
|
|
|
self.example_email('cordelia'),
|
|
|
|
"Verona",
|
|
|
|
content
|
|
|
|
)
|
|
|
|
|
|
|
|
self.do_test(
|
2018-03-14 00:05:55 +01:00
|
|
|
lambda: do_update_message_flags(user_profile, get_client("website"), 'add', 'read', [message]),
|
2017-07-21 20:31:25 +02:00
|
|
|
state_change_expected=True,
|
|
|
|
)
|
2017-05-23 03:02:01 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_send_message_to_existing_recipient(self) -> None:
|
2017-10-28 16:40:28 +02:00
|
|
|
self.send_stream_message(
|
2017-05-23 03:02:01 +02:00
|
|
|
self.example_email('cordelia'),
|
|
|
|
"Verona",
|
|
|
|
"hello 1"
|
|
|
|
)
|
|
|
|
self.do_test(
|
2017-10-28 16:40:28 +02:00
|
|
|
lambda: self.send_stream_message("cordelia@zulip.com", "Verona", "hello 2"),
|
2017-05-23 03:02:01 +02:00
|
|
|
state_change_expected=True,
|
|
|
|
)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_add_reaction_legacy(self) -> None:
        """Validate the reaction/add event emitted by the legacy
        (emoji-name-only) reaction code path.
        """
        schema_checker = self.check_events_dict([
            ('type', equals('reaction')),
            ('op', equals('add')),
            ('message_id', check_int),
            ('emoji_name', check_string),
            ('emoji_code', check_string),
            ('reaction_type', check_string),
            ('user', check_dict_only([
                ('email', check_string),
                ('full_name', check_string),
                ('user_id', check_int)
            ])),
        ])

        message_id = self.send_stream_message(self.example_email("hamlet"), "Verona", "hello")
        message = Message.objects.get(id=message_id)
        # Reactions do not affect the register-state payload.
        events = self.do_test(
            lambda: do_add_reaction_legacy(
                self.user_profile, message, "tada"),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_remove_reaction_legacy(self) -> None:
        """Validate the reaction/remove event emitted by the legacy
        (emoji-name-only) reaction code path.
        """
        schema_checker = self.check_events_dict([
            ('type', equals('reaction')),
            ('op', equals('remove')),
            ('message_id', check_int),
            ('emoji_name', check_string),
            ('emoji_code', check_string),
            ('reaction_type', check_string),
            ('user', check_dict_only([
                ('email', check_string),
                ('full_name', check_string),
                ('user_id', check_int)
            ])),
        ])

        message_id = self.send_stream_message(self.example_email("hamlet"), "Verona", "hello")
        message = Message.objects.get(id=message_id)
        # Add the reaction first so there is something to remove.
        do_add_reaction_legacy(self.user_profile, message, "tada")
        events = self.do_test(
            lambda: do_remove_reaction_legacy(
                self.user_profile, message, "tada"),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_add_reaction(self) -> None:
|
2017-10-08 09:34:59 +02:00
|
|
|
schema_checker = self.check_events_dict([
|
|
|
|
('type', equals('reaction')),
|
|
|
|
('op', equals('add')),
|
|
|
|
('message_id', check_int),
|
|
|
|
('emoji_name', check_string),
|
|
|
|
('emoji_code', check_string),
|
|
|
|
('reaction_type', check_string),
|
|
|
|
('user', check_dict_only([
|
|
|
|
('email', check_string),
|
|
|
|
('full_name', check_string),
|
|
|
|
('user_id', check_int)
|
|
|
|
])),
|
|
|
|
])
|
|
|
|
|
|
|
|
message_id = self.send_stream_message(self.example_email("hamlet"), "Verona", "hello")
|
|
|
|
message = Message.objects.get(id=message_id)
|
|
|
|
events = self.do_test(
|
|
|
|
lambda: do_add_reaction(
|
|
|
|
self.user_profile, message, "tada", "1f389", "unicode_emoji"),
|
|
|
|
state_change_expected=False,
|
|
|
|
)
|
|
|
|
error = schema_checker('events[0]', events[0])
|
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2018-02-12 10:53:36 +01:00
|
|
|
    def test_add_submessage(self) -> None:
        """Validate the submessage event emitted by do_add_submessage."""
        schema_checker = self.check_events_dict([
            ('type', equals('submessage')),
            ('message_id', check_int),
            ('submessage_id', check_int),
            ('sender_id', check_int),
            ('msg_type', check_string),
            ('content', check_string),
        ])

        cordelia = self.example_user('cordelia')
        stream_name = 'Verona'
        message_id = self.send_stream_message(
            sender_email=cordelia.email,
            stream_name=stream_name,
        )
        # Submessages do not affect the register-state payload.
        events = self.do_test(
            lambda: do_add_submessage(
                realm=cordelia.realm,
                sender_id=cordelia.id,
                message_id=message_id,
                msg_type='whatever',
                # Content is a JSON-encoded string, hence the inner quotes.
                content='"stuff"',
            ),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_remove_reaction(self) -> None:
|
2017-10-08 09:34:59 +02:00
|
|
|
schema_checker = self.check_events_dict([
|
|
|
|
('type', equals('reaction')),
|
|
|
|
('op', equals('remove')),
|
|
|
|
('message_id', check_int),
|
|
|
|
('emoji_name', check_string),
|
|
|
|
('emoji_code', check_string),
|
|
|
|
('reaction_type', check_string),
|
|
|
|
('user', check_dict_only([
|
|
|
|
('email', check_string),
|
|
|
|
('full_name', check_string),
|
|
|
|
('user_id', check_int)
|
|
|
|
])),
|
|
|
|
])
|
|
|
|
|
|
|
|
message_id = self.send_stream_message(self.example_email("hamlet"), "Verona", "hello")
|
|
|
|
message = Message.objects.get(id=message_id)
|
|
|
|
do_add_reaction(self.user_profile, message, "tada", "1f389", "unicode_emoji")
|
|
|
|
events = self.do_test(
|
|
|
|
lambda: do_remove_reaction(
|
|
|
|
self.user_profile, message, "1f389", "unicode_emoji"),
|
|
|
|
state_change_expected=False,
|
|
|
|
)
|
|
|
|
error = schema_checker('events[0]', events[0])
|
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2017-12-14 22:22:17 +01:00
|
|
|
def test_invite_user_event(self) -> None:
|
|
|
|
schema_checker = self.check_events_dict([
|
|
|
|
('type', equals('invites_changed')),
|
|
|
|
])
|
|
|
|
|
|
|
|
self.user_profile = self.example_user('iago')
|
|
|
|
streams = []
|
|
|
|
for stream_name in ["Denmark", "Scotland"]:
|
|
|
|
streams.append(get_stream(stream_name, self.user_profile.realm))
|
|
|
|
events = self.do_test(
|
|
|
|
lambda: do_invite_users(self.user_profile, ["foo@zulip.com"], streams, False),
|
|
|
|
state_change_expected=False,
|
|
|
|
)
|
|
|
|
error = schema_checker('events[0]', events[0])
|
|
|
|
self.assert_on_error(error)
|
|
|
|
|
|
|
|
def test_revoke_user_invite_event(self) -> None:
|
|
|
|
schema_checker = self.check_events_dict([
|
|
|
|
('type', equals('invites_changed')),
|
|
|
|
])
|
|
|
|
|
|
|
|
self.user_profile = self.example_user('iago')
|
|
|
|
streams = []
|
|
|
|
for stream_name in ["Denmark", "Verona"]:
|
|
|
|
streams.append(get_stream(stream_name, self.user_profile.realm))
|
|
|
|
do_invite_users(self.user_profile, ["foo@zulip.com"], streams, False)
|
|
|
|
prereg_users = PreregistrationUser.objects.filter(referred_by__realm=self.user_profile.realm)
|
|
|
|
events = self.do_test(
|
|
|
|
lambda: do_revoke_user_invite(prereg_users[0]),
|
|
|
|
state_change_expected=False,
|
|
|
|
)
|
|
|
|
error = schema_checker('events[0]', events[0])
|
|
|
|
self.assert_on_error(error)
|
|
|
|
|
|
|
|
def test_invitation_accept_invite_event(self) -> None:
|
|
|
|
schema_checker = self.check_events_dict([
|
|
|
|
('type', equals('invites_changed')),
|
|
|
|
])
|
|
|
|
|
|
|
|
self.user_profile = self.example_user('iago')
|
|
|
|
streams = []
|
|
|
|
for stream_name in ["Denmark", "Scotland"]:
|
|
|
|
streams.append(get_stream(stream_name, self.user_profile.realm))
|
|
|
|
|
|
|
|
do_invite_users(self.user_profile, ["foo@zulip.com"], streams, False)
|
|
|
|
prereg_users = PreregistrationUser.objects.get(email="foo@zulip.com")
|
|
|
|
|
|
|
|
events = self.do_test(
|
|
|
|
lambda: do_create_user('foo@zulip.com', 'password', self.user_profile.realm,
|
|
|
|
'full name', 'short name', prereg_user=prereg_users),
|
|
|
|
state_change_expected=True,
|
|
|
|
num_events=5,
|
|
|
|
)
|
|
|
|
|
|
|
|
error = schema_checker('events[4]', events[4])
|
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_typing_events(self) -> None:
        """Validate the typing/start event schema."""
        schema_checker = self.check_events_dict([
            ('type', equals('typing')),
            ('op', equals('start')),
            ('sender', check_dict_only([
                ('email', check_string),
                ('user_id', check_int)])),
            ('recipients', check_list(check_dict_only([
                ('email', check_string),
                ('user_id', check_int),
            ]))),
        ])

        # Typing notifications are transient and never change the
        # register-state payload.
        events = self.do_test(
            lambda: check_send_typing_notification(
                self.user_profile, [self.example_email("cordelia")], "start"),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
|
|
|
|
|
2018-05-16 02:55:35 +02:00
|
|
|
    def test_get_typing_user_profiles(self) -> None:
        """
        Make sure we properly assert failures for recipient types that should not
        get typing... notifications.
        """

        sender_profile = self.example_user('cordelia')
        stream = get_stream('Rome', sender_profile.realm)

        # Test stream
        with self.assertRaisesRegex(ValueError, 'not supported for streams'):
            recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
            get_typing_user_profiles(recipient, sender_profile.id)

        # Test some other recipient type
        with self.assertRaisesRegex(ValueError, 'Bad recipient type'):
            # An unsaved Recipient with a type outside the known set.
            recipient = Recipient(type=999)  # invalid type
            get_typing_user_profiles(recipient, sender_profile.id)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_custom_profile_fields_events(self) -> None:
        """Validate the custom_profile_fields event schema, both for the
        initial notification and after a field's hint is updated.
        """
        schema_checker = self.check_events_dict([
            ('type', equals('custom_profile_fields')),
            ('op', equals('add')),
            ('fields', check_list(check_dict_only([
                ('id', check_int),
                ('type', check_int),
                ('name', check_string),
                ('hint', check_string),
                ('field_data', check_string),
                ('order', check_int),
            ]))),
        ])

        events = self.do_test(
            lambda: notify_realm_custom_profile_fields(
                self.user_profile.realm, 'add'),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

        # Update an existing field's hint and re-notify; the event schema
        # must be unchanged.
        realm = self.user_profile.realm
        field = realm.customprofilefield_set.get(realm=realm, name='Biography')
        name = field.name
        hint = 'Biography of the user'
        try_update_realm_custom_profile_field(realm, field, name, hint=hint)

        events = self.do_test(
            lambda: notify_realm_custom_profile_fields(
                self.user_profile.realm, 'add'),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
|
2017-03-17 10:07:22 +01:00
|
|
|
|
2018-07-09 11:49:08 +02:00
|
|
|
def test_custom_profile_field_data_events(self) -> None:
    """Verify the realm_user/update event emitted when a user's custom
    profile field value changes."""
    schema_checker = self.check_events_dict([
        ('type', equals('realm_user')),
        ('op', equals('update')),
        ('person', check_dict_only([
            ('user_id', check_int),
            # Extra keys are tolerated here by design
            # (_allow_only_listed_keys=False).
            ('custom_profile_field', check_dict([
                ('id', check_int),
                ('value', check_none_or(check_string)),
            ], _allow_only_listed_keys=False)),
        ])),
    ])

    realm = get_realm("zulip")
    field_id = realm.customprofilefield_set.get(realm=realm, name='Biography').id
    field = {
        "id": field_id,
        "value": "New value",
    }
    events = self.do_test(lambda: do_update_user_custom_profile_data(self.user_profile, [field]))
    error = schema_checker('events[0]', events[0])
    self.assert_on_error(error)

    # Test we pass correct stringify value in custom-user-field data event
    # (the 'Mentor' field takes a list of user ids as its raw value).
    field_id = realm.customprofilefield_set.get(realm=realm, name='Mentor').id
    field = {
        "id": field_id,
        "value": [self.example_user("ZOE").id],
    }
    events = self.do_test(lambda: do_update_user_custom_profile_data(self.user_profile, [field]))
    error = schema_checker('events[0]', events[0])
    self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_presence_events(self) -> None:
    """Verify the presence event schema for a single ('website') client."""
    schema_checker = self.check_events_dict([
        ('type', equals('presence')),
        ('email', check_string),
        ('server_timestamp', check_float),
        # Presence is a dict keyed by client name; only 'website' is
        # expected here since that is the only client we update.
        ('presence', check_dict_only([
            ('website', check_dict_only([
                ('status', equals('active')),
                ('timestamp', check_int),
                ('client', check_string),
                ('pushable', check_bool),
            ])),
        ])),
    ])
    events = self.do_test(lambda: do_update_user_presence(
        self.user_profile, get_client("website"), timezone_now(), UserPresence.ACTIVE))
    error = schema_checker('events[0]', events[0])
    self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_presence_events_multiple_clients(self) -> None:
    """Verify that a presence update from a second client only includes
    that client's entry in the resulting event."""
    schema_checker_android = self.check_events_dict([
        ('type', equals('presence')),
        ('email', check_string),
        ('server_timestamp', check_float),
        ('presence', check_dict_only([
            # Only the Android client's presence appears in this event.
            ('ZulipAndroid/1.0', check_dict_only([
                ('status', equals('idle')),
                ('timestamp', check_int),
                ('client', check_string),
                ('pushable', check_bool),
            ])),
        ])),
    ])
    # Register the Android client via the API so it exists as a presence
    # client before the do_update_user_presence calls below.
    self.api_post(self.user_profile.email, "/api/v1/users/me/presence", {'status': 'idle'},
                  HTTP_USER_AGENT="ZulipAndroid/1.0")
    # Establish website presence first; its event is not checked here.
    self.do_test(lambda: do_update_user_presence(
        self.user_profile, get_client("website"), timezone_now(), UserPresence.ACTIVE))
    events = self.do_test(lambda: do_update_user_presence(
        self.user_profile, get_client("ZulipAndroid/1.0"), timezone_now(), UserPresence.IDLE))
    error = schema_checker_android('events[0]', events[0])
    self.assert_on_error(error)
|
2017-03-24 05:26:32 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_pointer_events(self) -> None:
    """Moving the pointer should emit a pointer event carrying the new id."""
    pointer_checker = self.check_events_dict([
        ('type', equals('pointer')),
        ('pointer', check_int)
    ])
    move_pointer = lambda: do_update_pointer(self.user_profile, get_client("website"), 1500)
    events = self.do_test(move_pointer)
    self.assert_on_error(pointer_checker('events[0]', events[0]))
|
2014-01-31 23:23:39 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_register_events(self) -> None:
    """Verify the realm_user/add event generated when a new user registers."""
    realm_user_add_checker = self.check_events_dict([
        ('type', equals('realm_user')),
        ('op', equals('add')),
        ('person', check_dict_only([
            ('user_id', check_int),
            ('email', check_string),
            # avatar_url may be None (e.g. gravatar-less accounts).
            ('avatar_url', check_none_or(check_string)),
            ('full_name', check_string),
            ('is_admin', check_bool),
            ('is_bot', check_bool),
            ('is_guest', check_bool),
            # A brand-new user has no custom profile data.
            ('profile_data', check_dict_only([])),
            ('timezone', check_string),
            ('date_joined', check_string),
        ])),
    ])

    events = self.do_test(lambda: self.register("test1@zulip.com", "test1"))
    # Registration should produce exactly one event for this user.
    self.assert_length(events, 1)
    error = realm_user_add_checker('events[0]', events[0])
    self.assert_on_error(error)
|
2014-01-31 23:23:39 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_alert_words_events(self) -> None:
    """Adding and removing an alert word should each emit an
    alert_words event carrying the full current list."""
    checker = self.check_events_dict([
        ('type', equals('alert_words')),
        ('alert_words', check_list(check_string)),
    ])

    # Exercise the add path and then the remove path with the same word;
    # both events share the same schema.
    for mutate in (do_add_alert_words, do_remove_alert_words):
        events = self.do_test(lambda: mutate(self.user_profile, ["alert_word"]))
        self.assert_on_error(checker('events[0]', events[0]))
|
2014-01-31 23:23:39 +01:00
|
|
|
|
2017-11-14 07:31:31 +01:00
|
|
|
def test_user_group_events(self) -> None:
    """Walk a user group through its full lifecycle (create, rename,
    re-describe, add/remove members, delete), checking each event schema."""
    user_group_add_checker = self.check_events_dict([
        ('type', equals('user_group')),
        ('op', equals('add')),
        ('group', check_dict_only([
            ('id', check_int),
            ('name', check_string),
            ('members', check_list(check_int)),
            ('description', check_string),
        ])),
    ])
    othello = self.example_user('othello')
    zulip = get_realm('zulip')
    events = self.do_test(lambda: check_add_user_group(zulip, 'backend', [othello],
                                                       'Backend team'))
    error = user_group_add_checker('events[0]', events[0])
    self.assert_on_error(error)

    # Test name update
    user_group_update_checker = self.check_events_dict([
        ('type', equals('user_group')),
        ('op', equals('update')),
        ('group_id', check_int),
        # Only the changed attribute appears in 'data'.
        ('data', check_dict_only([
            ('name', check_string),
        ])),
    ])
    backend = UserGroup.objects.get(name='backend')
    events = self.do_test(lambda: do_update_user_group_name(backend, 'backendteam'))
    error = user_group_update_checker('events[0]', events[0])
    self.assert_on_error(error)

    # Test description update
    user_group_update_checker = self.check_events_dict([
        ('type', equals('user_group')),
        ('op', equals('update')),
        ('group_id', check_int),
        ('data', check_dict_only([
            ('description', check_string),
        ])),
    ])
    description = "Backend team to deal with backend code."
    events = self.do_test(lambda: do_update_user_group_description(backend, description))
    error = user_group_update_checker('events[0]', events[0])
    self.assert_on_error(error)

    # Test add members
    user_group_add_member_checker = self.check_events_dict([
        ('type', equals('user_group')),
        ('op', equals('add_members')),
        ('group_id', check_int),
        ('user_ids', check_list(check_int)),
    ])
    hamlet = self.example_user('hamlet')
    events = self.do_test(lambda: bulk_add_members_to_user_group(backend, [hamlet]))
    error = user_group_add_member_checker('events[0]', events[0])
    self.assert_on_error(error)

    # Test remove members
    user_group_remove_member_checker = self.check_events_dict([
        ('type', equals('user_group')),
        ('op', equals('remove_members')),
        ('group_id', check_int),
        ('user_ids', check_list(check_int)),
    ])
    hamlet = self.example_user('hamlet')
    events = self.do_test(lambda: remove_members_from_user_group(backend, [hamlet]))
    error = user_group_remove_member_checker('events[0]', events[0])
    self.assert_on_error(error)

    # Test delete event
    user_group_remove_checker = self.check_events_dict([
        ('type', equals('user_group')),
        ('op', equals('remove')),
        ('group_id', check_int),
    ])
    events = self.do_test(lambda: check_delete_user_group(backend.id, othello))
    error = user_group_remove_checker('events[0]', events[0])
    self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_default_stream_groups_events(self) -> None:
    """Exercise every mutation of a default stream group (create,
    add/remove streams, change description/name, delete); all of them
    emit the same default_stream_groups event shape."""
    default_stream_groups_checker = self.check_events_dict([
        ('type', equals('default_stream_groups')),
        ('default_stream_groups', check_list(check_dict_only([
            ('name', check_string),
            ('id', check_int),
            ('description', check_string),
            # Each group carries full stream dicts, not just ids.
            ('streams', check_list(check_dict_only([
                ('description', check_string),
                ('invite_only', check_bool),
                ('is_announcement_only', check_bool),
                ('name', check_string),
                ('stream_id', check_int),
                ('history_public_to_subscribers', check_bool)]))),
        ]))),
    ])

    streams = []
    for stream_name in ["Scotland", "Verona", "Denmark"]:
        streams.append(get_stream(stream_name, self.user_profile.realm))

    events = self.do_test(lambda: do_create_default_stream_group(
        self.user_profile.realm, "group1", "This is group1", streams))
    error = default_stream_groups_checker('events[0]', events[0])
    self.assert_on_error(error)

    group = lookup_default_stream_groups(["group1"], self.user_profile.realm)[0]
    venice_stream = get_stream("Venice", self.user_profile.realm)
    events = self.do_test(lambda: do_add_streams_to_default_stream_group(self.user_profile.realm,
                                                                         group, [venice_stream]))
    error = default_stream_groups_checker('events[0]', events[0])
    self.assert_on_error(error)

    events = self.do_test(lambda: do_remove_streams_from_default_stream_group(self.user_profile.realm,
                                                                              group, [venice_stream]))
    error = default_stream_groups_checker('events[0]', events[0])
    self.assert_on_error(error)

    events = self.do_test(lambda: do_change_default_stream_group_description(self.user_profile.realm,
                                                                             group, "New description"))
    error = default_stream_groups_checker('events[0]', events[0])
    self.assert_on_error(error)

    events = self.do_test(lambda: do_change_default_stream_group_name(self.user_profile.realm,
                                                                      group, "New Group Name"))
    error = default_stream_groups_checker('events[0]', events[0])
    self.assert_on_error(error)

    events = self.do_test(lambda: do_remove_default_stream_group(self.user_profile.realm, group))
    error = default_stream_groups_checker('events[0]', events[0])
    self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_default_streams_events(self) -> None:
    """Adding and removing a realm default stream should each emit a
    default_streams event containing the current default-stream list."""
    default_streams_checker = self.check_events_dict([
        ('type', equals('default_streams')),
        ('default_streams', check_list(check_dict_only([
            ('description', check_string),
            ('invite_only', check_bool),
            ('name', check_string),
            ('stream_id', check_int),
        ]))),
    ])

    stream = get_stream("Scotland", self.user_profile.realm)
    events = self.do_test(lambda: do_add_default_stream(stream))
    error = default_streams_checker('events[0]', events[0])
    # Previously this first check's result was silently discarded
    # (overwritten below without an assertion), so a malformed add
    # event could pass unnoticed.
    self.assert_on_error(error)
    events = self.do_test(lambda: do_remove_default_stream(stream))
    error = default_streams_checker('events[0]', events[0])
    self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_muted_topics_events(self) -> None:
    """Verify the muted_topics event for both mute and unmute operations."""
    muted_topics_checker = self.check_events_dict([
        ('type', equals('muted_topics')),
        # Each entry is a [stream_name, topic_name] pair.
        ('muted_topics', check_list(check_list(check_string, 2))),
    ])
    stream = get_stream('Denmark', self.user_profile.realm)
    recipient = get_stream_recipient(stream.id)
    events = self.do_test(lambda: do_mute_topic(
        self.user_profile, stream, recipient, "topic"))
    error = muted_topics_checker('events[0]', events[0])
    self.assert_on_error(error)

    events = self.do_test(lambda: do_unmute_topic(
        self.user_profile, stream, "topic"))
    error = muted_topics_checker('events[0]', events[0])
    self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_change_avatar_fields(self) -> None:
    """Verify realm_user/update events for avatar changes, for both the
    user-uploaded and gravatar avatar sources."""
    schema_checker = self.check_events_dict([
        ('type', equals('realm_user')),
        ('op', equals('update')),
        ('person', check_dict_only([
            ('email', check_string),
            ('user_id', check_int),
            # With a user-uploaded avatar, URLs are always present.
            ('avatar_url', check_string),
            ('avatar_url_medium', check_string),
            ('avatar_source', check_string),
        ])),
    ])
    events = self.do_test(
        lambda: do_change_avatar_fields(self.user_profile, UserProfile.AVATAR_FROM_USER),
    )
    error = schema_checker('events[0]', events[0])
    self.assert_on_error(error)

    # Switching back to gravatar: the URLs may be None in this event.
    schema_checker = self.check_events_dict([
        ('type', equals('realm_user')),
        ('op', equals('update')),
        ('person', check_dict_only([
            ('email', check_string),
            ('user_id', check_int),
            ('avatar_url', check_none_or(check_string)),
            ('avatar_url_medium', check_none_or(check_string)),
            ('avatar_source', check_string),
        ])),
    ])
    events = self.do_test(
        lambda: do_change_avatar_fields(self.user_profile, UserProfile.AVATAR_FROM_GRAVATAR),
    )
    error = schema_checker('events[0]', events[0])
    self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_change_full_name(self) -> None:
    """Renaming a user should emit a realm_user/update event carrying
    the user's email, new full name, and id."""
    rename_checker = self.check_events_dict([
        ('type', equals('realm_user')),
        ('op', equals('update')),
        ('person', check_dict_only([
            ('email', check_string),
            ('full_name', check_string),
            ('user_id', check_int),
        ])),
    ])
    rename = lambda: do_change_full_name(self.user_profile, 'Sir Hamlet', self.user_profile)
    events = self.do_test(rename)
    self.assert_on_error(rename_checker('events[0]', events[0]))
|
2014-01-31 23:23:39 +01:00
|
|
|
|
2018-08-02 08:47:13 +02:00
|
|
|
def test_change_user_delivery_email_email_address_visibilty_admins(self) -> None:
    """Verify the realm_user/update event for a delivery-email change when
    email visibility is restricted to admins."""
    schema_checker = self.check_events_dict([
        ('type', equals('realm_user')),
        ('op', equals('update')),
        ('person', check_dict_only([
            ('delivery_email', check_string),
            ('user_id', check_int),
        ])),
    ])
    # Restrict email visibility first so the delivery_email field is the
    # one that changes (rather than the public email).
    do_set_realm_property(self.user_profile.realm, "email_address_visibility",
                          Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS)
    action = lambda: do_change_user_delivery_email(self.user_profile, 'newhamlet@zulip.com')
    events = self.do_test(action, num_events=1)
    error = schema_checker('events[0]', events[0])
    self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def do_set_realm_property_test(self, name: str) -> None:
    """Set realm property `name` through each of its test values and
    verify the realm/update event schema for every transition.

    Raises AssertionError for properties with no test values or an
    unsupported property type.
    """
    bool_tests = [True, False, True]  # type: List[bool]
    test_values = dict(
        default_language=[u'es', u'de', u'en'],
        description=[u'Realm description', u'New description'],
        message_retention_days=[10, 20],
        name=[u'Zulip', u'New Name'],
        waiting_period_threshold=[10, 20],
        # NOTE(review): single-element lists (e.g. email_address_visibility,
        # bot_creation_policy) mean vals[1:] is empty, so no transition is
        # actually exercised for those properties — confirm if intentional.
        email_address_visibility=[Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS],
        bot_creation_policy=[Realm.BOT_CREATION_EVERYONE],
        video_chat_provider=[u'Google Hangouts', u'Jitsi'],
        google_hangouts_domain=[u"zulip.com", u"zulip.org"],
    )  # type: Dict[str, Any]

    vals = test_values.get(name)
    # Pick a validator matching the property's declared type; bool
    # properties share a common set of toggle values.
    property_type = Realm.property_types[name]
    if property_type is bool:
        validator = check_bool
        vals = bool_tests
    elif property_type is str:
        validator = check_string
    elif property_type is int:
        validator = check_int
    elif property_type == (int, type(None)):
        validator = check_int
    else:
        raise AssertionError("Unexpected property type %s" % (property_type,))
    schema_checker = self.check_events_dict([
        ('type', equals('realm')),
        ('op', equals('update')),
        ('property', equals(name)),
        ('value', validator),
    ])

    if vals is None:
        # Fixed: '%' argument was '(name)' (plain parentheses, not a
        # tuple) — now consistent with the raise above and safe even if
        # the value were itself a tuple.
        raise AssertionError('No test created for %s' % (name,))
    # Establish a known starting value, then test each transition.
    do_set_realm_property(self.user_profile.realm, name, vals[0])
    for val in vals[1:]:
        events = self.do_test(
            lambda: do_set_realm_property(self.user_profile.realm, name, val))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
|
2014-01-31 23:23:39 +01:00
|
|
|
|
2017-10-07 00:29:18 +02:00
|
|
|
@slow("Actually runs several full-stack fetching tests")
def test_change_realm_property(self) -> None:
    """Run the per-property realm-setting event test for every
    property declared in Realm.property_types."""
    for property_name in Realm.property_types:
        self.do_set_realm_property_test(property_name)
|
2017-03-04 06:39:45 +01:00
|
|
|
|
2017-10-28 00:46:59 +02:00
|
|
|
@slow("Runs a large matrix of tests")
def test_change_realm_authentication_methods(self) -> None:
    """Verify the realm/update_dict event for each transition of the
    realm's enabled authentication methods."""
    schema_checker = self.check_events_dict([
        ('type', equals('realm')),
        ('op', equals('update_dict')),
        ('property', equals('default')),
        ('data', check_dict_only([
            ('authentication_methods', check_dict([]))
        ])),
    ])

    def fake_backends() -> Any:
        # Enable a fixed set of auth backends so every method in the
        # transition matrix below is configurable.
        backends = (
            'zproject.backends.DevAuthBackend',
            'zproject.backends.EmailAuthBackend',
            'zproject.backends.GitHubAuthBackend',
            'zproject.backends.GoogleMobileOauth2Backend',
            'zproject.backends.ZulipLDAPAuthBackend',
        )
        return self.settings(AUTHENTICATION_BACKENDS=backends)

    # Test transitions; any new backends should be tested with T/T/T/F/T
    for (auth_method_dict) in \
            ({'Google': True, 'Email': True, 'GitHub': True, 'LDAP': False, 'Dev': False},
             {'Google': True, 'Email': True, 'GitHub': False, 'LDAP': False, 'Dev': False},
             {'Google': True, 'Email': False, 'GitHub': False, 'LDAP': False, 'Dev': False},
             {'Google': True, 'Email': False, 'GitHub': True, 'LDAP': False, 'Dev': False},
             {'Google': False, 'Email': False, 'GitHub': False, 'LDAP': False, 'Dev': True},
             {'Google': False, 'Email': False, 'GitHub': True, 'LDAP': False, 'Dev': True},
             {'Google': False, 'Email': True, 'GitHub': True, 'LDAP': True, 'Dev': False}):
        with fake_backends():
            events = self.do_test(
                lambda: do_set_realm_authentication_methods(
                    self.user_profile.realm,
                    auth_method_dict))

        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_change_pin_stream(self) -> None:
    """Toggling a subscription's pin_to_top flag should emit a
    subscription/update event in each direction."""
    pin_checker = self.check_events_dict([
        ('type', equals('subscription')),
        ('op', equals('update')),
        ('property', equals('pin_to_top')),
        ('stream_id', check_int),
        ('value', check_bool),
        ('name', check_string),
        ('email', check_string),
    ])
    stream = get_stream("Denmark", self.user_profile.realm)
    sub = get_subscription(stream.name, self.user_profile)
    # Normalize to unpinned so both transitions below generate events.
    do_change_subscription_property(self.user_profile, sub, stream, "pin_to_top", False)
    for new_value in [True, False]:
        events = self.do_test(
            lambda: do_change_subscription_property(
                self.user_profile, sub, stream, "pin_to_top", new_value))
        self.assert_on_error(pin_checker('events[0]', events[0]))
|
|
|
|
|
2017-10-28 00:46:59 +02:00
|
|
|
@slow("Runs a matrix of 6 queries to the /home view")
def test_change_realm_message_edit_settings(self) -> None:
    """Verify the realm/update_dict event for each message-editing
    settings transition."""
    schema_checker = self.check_events_dict([
        ('type', equals('realm')),
        ('op', equals('update_dict')),
        ('property', equals('default')),
        ('data', check_dict_only([
            ('allow_message_editing', check_bool),
            ('message_content_edit_limit_seconds', check_int),
            ('allow_community_topic_editing', check_bool),
        ])),
    ])
    # Test every transition among the four possibilities {T,F} x {0, non-0}
    for (allow_message_editing, message_content_edit_limit_seconds) in \
            ((True, 0), (False, 0), (False, 1234),
             (True, 600), (False, 0), (True, 1234)):
        events = self.do_test(
            lambda: do_set_realm_message_editing(self.user_profile.realm,
                                                 allow_message_editing,
                                                 message_content_edit_limit_seconds,
                                                 False))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_change_realm_notifications_stream(self) -> None:
    """Setting and then clearing the realm notifications stream should
    each emit a realm/update event with the stream id."""
    stream_id_checker = self.check_events_dict([
        ('type', equals('realm')),
        ('op', equals('update')),
        ('property', equals('notifications_stream_id')),
        ('value', check_int),
    ])

    stream = get_stream("Rome", self.user_profile.realm)

    # First point at a real stream, then disable it (None maps to -1).
    for new_stream, new_stream_id in [(stream, stream.id), (None, -1)]:
        events = self.do_test(
            lambda: do_set_realm_notifications_stream(
                self.user_profile.realm, new_stream, new_stream_id))
        self.assert_on_error(stream_id_checker('events[0]', events[0]))
|
|
|
|
|
2017-10-20 16:55:04 +02:00
|
|
|
def test_change_realm_signup_notifications_stream(self) -> None:
    """Setting and then clearing the realm signup-notifications stream
    should each emit a realm/update event with the stream id."""
    stream_id_checker = self.check_events_dict([
        ('type', equals('realm')),
        ('op', equals('update')),
        ('property', equals('signup_notifications_stream_id')),
        ('value', check_int),
    ])

    stream = get_stream("Rome", self.user_profile.realm)

    # First point at a real stream, then disable it (None maps to -1).
    for new_stream, new_stream_id in [(stream, stream.id), (None, -1)]:
        events = self.do_test(
            lambda: do_set_realm_signup_notifications_stream(
                self.user_profile.realm, new_stream, new_stream_id))
        self.assert_on_error(stream_id_checker('events[0]', events[0]))
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_change_is_admin(self) -> None:
    """Granting and revoking admin rights should each emit a
    realm_user/update event with the new is_admin flag."""
    admin_checker = self.check_events_dict([
        ('type', equals('realm_user')),
        ('op', equals('update')),
        ('person', check_dict_only([
            ('email', check_string),
            ('is_admin', check_bool),
            ('user_id', check_int),
        ])),
    ])
    # Normalize to non-admin so both transitions below generate events.
    do_change_is_admin(self.user_profile, False)
    for admin_flag in (True, False):
        events = self.do_test(lambda: do_change_is_admin(self.user_profile, admin_flag))
        self.assert_on_error(admin_checker('events[0]', events[0]))
|
2014-01-31 23:23:39 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def do_set_user_display_settings_test(self, setting_name: str) -> None:
    """Test updating each setting in UserProfile.property_types dict.

    Cycles `setting_name` through its test values and checks the
    update_display_settings event (plus the extra realm_user event for
    timezone changes) against the expected schema.

    Raises AssertionError for settings with no test values or an
    unsupported property type.
    """
    test_changes = dict(
        emojiset=[u'apple', u'twitter'],
        default_language=[u'es', u'de', u'en'],
        timezone=[u'US/Mountain', u'US/Samoa', u'Pacific/Galapogos', u''],
    )  # type: Dict[str, Any]

    # Pick the schema validator for the setting's declared type.
    property_type = UserProfile.property_types[setting_name]
    if property_type is bool:
        validator = check_bool
    elif property_type is str:
        validator = check_string
    else:
        raise AssertionError("Unexpected property type %s" % (property_type,))

    # Timezone changes additionally broadcast a realm_user event.
    num_events = 1
    if setting_name == "timezone":
        num_events = 2
    values = test_changes.get(setting_name)
    if property_type is bool:
        # Start from the opposite of the current value so every step
        # is a real transition.
        if getattr(self.user_profile, setting_name) is False:
            values = [True, False, True]
        else:
            values = [False, True, False]
    if values is None:
        # Fixed: '%' argument was '(setting_name)' (plain parentheses,
        # not a tuple) — now consistent with the raise above.
        raise AssertionError('No test created for %s' % (setting_name,))

    # The checkers do not depend on the loop variable, so build them
    # once instead of on every iteration (they were previously
    # reconstructed inside the loop).
    schema_checker = self.check_events_dict([
        ('type', equals('update_display_settings')),
        ('setting_name', equals(setting_name)),
        ('user', check_string),
        ('setting', validator),
    ])
    # default_language events additionally carry the language's name.
    language_schema_checker = self.check_events_dict([
        ('type', equals('update_display_settings')),
        ('language_name', check_string),
        ('setting_name', equals(setting_name)),
        ('user', check_string),
        ('setting', validator),
    ])
    timezone_schema_checker = self.check_events_dict([
        ('type', equals('realm_user')),
        ('op', equals('update')),
        ('person', check_dict_only([
            ('email', check_string),
            ('user_id', check_int),
            ('timezone', check_string),
        ])),
    ])

    for value in values:
        events = self.do_test(lambda: do_set_user_display_setting(
            self.user_profile, setting_name, value), num_events=num_events)

        if setting_name == "default_language":
            error = language_schema_checker('events[0]', events[0])
        else:
            error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

        if setting_name == "timezone":
            error = timezone_schema_checker('events[1]', events[1])
            # Bug fix: this check's result was previously discarded
            # without being asserted.
            self.assert_on_error(error)
|
|
|
|
|
2017-10-07 00:29:18 +02:00
|
|
|
@slow("Actually runs several full-stack fetching tests")
def test_set_user_display_settings(self) -> None:
    """Run the display-settings event test for every property declared
    in UserProfile.property_types."""
    for setting in UserProfile.property_types:
        self.do_set_user_display_settings_test(setting)
|
2017-03-02 08:30:53 +01:00
|
|
|
|
2017-10-07 00:29:18 +02:00
|
|
|
@slow("Actually runs several full-stack fetching tests")
def test_change_notification_settings(self) -> None:
    """Toggle every boolean notification setting both ways and verify
    the update_global_notifications event schema each time."""
    for notification_setting, v in self.user_profile.notification_setting_types.items():
        if notification_setting == "notification_sound":
            # notification_sound is tested in its own test
            continue

        schema_checker = self.check_events_dict([
            ('type', equals('update_global_notifications')),
            ('notification_name', equals(notification_setting)),
            ('user', check_string),
            ('setting', check_bool),
        ])
        # Reset to False first so both transitions below fire events.
        do_change_notification_settings(self.user_profile, notification_setting, False)

        for setting_value in [True, False]:
            events = self.do_test(lambda: do_change_notification_settings(
                self.user_profile, notification_setting, setting_value, log=False))
            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)
|
2016-12-08 21:06:23 +01:00
|
|
|
|
2018-01-11 21:36:11 +01:00
|
|
|
def test_change_notification_sound(self) -> None:
|
|
|
|
notification_setting = "notification_sound"
|
|
|
|
schema_checker = self.check_events_dict([
|
|
|
|
('type', equals('update_global_notifications')),
|
|
|
|
('notification_name', equals(notification_setting)),
|
|
|
|
('user', check_string),
|
|
|
|
('setting', equals("ding")),
|
|
|
|
])
|
|
|
|
|
|
|
|
events = self.do_test(lambda: do_change_notification_settings(
|
|
|
|
self.user_profile, notification_setting, 'ding', log=False))
|
|
|
|
error = schema_checker('events[0]', events[0])
|
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_realm_emoji_events(self) -> None:
|
2017-04-20 17:31:41 +02:00
|
|
|
schema_checker = self.check_events_dict([
|
2014-03-06 17:07:43 +01:00
|
|
|
('type', equals('realm_emoji')),
|
|
|
|
('op', equals('update')),
|
|
|
|
('realm_emoji', check_dict([])),
|
|
|
|
])
|
2018-03-11 18:55:20 +01:00
|
|
|
author = self.example_user('iago')
|
|
|
|
with get_test_image_file('img.png') as img_file:
|
|
|
|
events = self.do_test(lambda: check_add_realm_emoji(get_realm("zulip"),
|
|
|
|
"my_emoji",
|
|
|
|
author,
|
|
|
|
img_file))
|
2014-03-06 17:07:43 +01:00
|
|
|
error = schema_checker('events[0]', events[0])
|
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2017-01-04 05:30:48 +01:00
|
|
|
events = self.do_test(lambda: do_remove_realm_emoji(get_realm("zulip"), "my_emoji"))
|
2014-03-06 17:07:43 +01:00
|
|
|
error = schema_checker('events[0]', events[0])
|
|
|
|
self.assert_on_error(error)
|
2014-01-31 23:23:39 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_realm_filter_events(self) -> None:
|
2017-04-20 17:31:41 +02:00
|
|
|
schema_checker = self.check_events_dict([
|
2014-03-06 17:07:43 +01:00
|
|
|
('type', equals('realm_filters')),
|
2017-07-11 21:31:45 +02:00
|
|
|
('realm_filters', check_list(None)), # TODO: validate tuples in the list
|
2014-03-06 17:07:43 +01:00
|
|
|
])
|
2017-01-04 05:30:48 +01:00
|
|
|
events = self.do_test(lambda: do_add_realm_filter(get_realm("zulip"), "#(?P<id>[123])",
|
2014-03-06 17:07:43 +01:00
|
|
|
"https://realm.com/my_realm_filter/%(id)s"))
|
|
|
|
error = schema_checker('events[0]', events[0])
|
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2017-01-04 05:30:48 +01:00
|
|
|
self.do_test(lambda: do_remove_realm_filter(get_realm("zulip"), "#(?P<id>[123])"))
|
2014-03-06 17:07:43 +01:00
|
|
|
error = schema_checker('events[0]', events[0])
|
|
|
|
self.assert_on_error(error)
|
2014-01-31 23:23:39 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_realm_domain_events(self) -> None:
|
2017-04-20 17:31:41 +02:00
|
|
|
schema_checker = self.check_events_dict([
|
2016-12-26 19:19:02 +01:00
|
|
|
('type', equals('realm_domains')),
|
|
|
|
('op', equals('add')),
|
2017-04-20 17:31:41 +02:00
|
|
|
('realm_domain', check_dict_only([
|
2016-12-26 19:19:02 +01:00
|
|
|
('domain', check_string),
|
2017-01-21 08:19:03 +01:00
|
|
|
('allow_subdomains', check_bool),
|
2016-12-26 19:19:02 +01:00
|
|
|
])),
|
|
|
|
])
|
|
|
|
realm = get_realm('zulip')
|
2017-03-31 19:53:34 +02:00
|
|
|
events = self.do_test(lambda: do_add_realm_domain(realm, 'zulip.org', False))
|
2016-12-26 19:19:02 +01:00
|
|
|
error = schema_checker('events[0]', events[0])
|
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2017-04-20 17:31:41 +02:00
|
|
|
schema_checker = self.check_events_dict([
|
2016-12-26 19:19:02 +01:00
|
|
|
('type', equals('realm_domains')),
|
2017-02-09 22:44:03 +01:00
|
|
|
('op', equals('change')),
|
2017-04-20 17:31:41 +02:00
|
|
|
('realm_domain', check_dict_only([
|
2017-02-09 22:44:03 +01:00
|
|
|
('domain', equals('zulip.org')),
|
|
|
|
('allow_subdomains', equals(True)),
|
|
|
|
])),
|
2016-12-26 19:19:02 +01:00
|
|
|
])
|
2017-03-31 20:41:16 +02:00
|
|
|
test_domain = RealmDomain.objects.get(realm=realm, domain='zulip.org')
|
|
|
|
events = self.do_test(lambda: do_change_realm_domain(test_domain, True))
|
2017-02-09 22:44:03 +01:00
|
|
|
error = schema_checker('events[0]', events[0])
|
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2017-04-20 17:31:41 +02:00
|
|
|
schema_checker = self.check_events_dict([
|
2017-02-09 22:44:03 +01:00
|
|
|
('type', equals('realm_domains')),
|
|
|
|
('op', equals('remove')),
|
|
|
|
('domain', equals('zulip.org')),
|
|
|
|
])
|
2017-03-31 20:41:16 +02:00
|
|
|
events = self.do_test(lambda: do_remove_realm_domain(test_domain))
|
2016-12-26 19:19:02 +01:00
|
|
|
error = schema_checker('events[0]', events[0])
|
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_create_bot(self) -> None:
|
2018-01-30 17:10:10 +01:00
|
|
|
|
|
|
|
def get_bot_created_checker(bot_type: str) -> Validator:
|
|
|
|
if bot_type == "GENERIC_BOT":
|
2018-01-30 19:21:13 +01:00
|
|
|
check_services = check_list(sub_validator=None, length=0)
|
2018-01-30 17:10:10 +01:00
|
|
|
elif bot_type == "OUTGOING_WEBHOOK_BOT":
|
2018-03-16 16:46:51 +01:00
|
|
|
check_services = check_list(check_dict_only([
|
2018-01-30 19:21:13 +01:00
|
|
|
('base_url', check_url),
|
|
|
|
('interface', check_int),
|
2018-05-30 11:09:35 +02:00
|
|
|
('token', check_string),
|
2018-01-30 19:21:13 +01:00
|
|
|
]), length=1)
|
|
|
|
elif bot_type == "EMBEDDED_BOT":
|
2018-03-16 16:46:51 +01:00
|
|
|
check_services = check_list(check_dict_only([
|
2018-01-30 19:21:13 +01:00
|
|
|
('service_name', check_string),
|
|
|
|
('config_data', check_dict(value_validator=check_string)),
|
|
|
|
]), length=1)
|
2018-01-30 17:10:10 +01:00
|
|
|
return self.check_events_dict([
|
|
|
|
('type', equals('realm_bot')),
|
|
|
|
('op', equals('add')),
|
|
|
|
('bot', check_dict_only([
|
|
|
|
('email', check_string),
|
|
|
|
('user_id', check_int),
|
|
|
|
('bot_type', check_int),
|
|
|
|
('full_name', check_string),
|
|
|
|
('is_active', check_bool),
|
|
|
|
('api_key', check_string),
|
|
|
|
('default_sending_stream', check_none_or(check_string)),
|
|
|
|
('default_events_register_stream', check_none_or(check_string)),
|
|
|
|
('default_all_public_streams', check_bool),
|
|
|
|
('avatar_url', check_string),
|
|
|
|
('owner', check_string),
|
2018-01-30 19:21:13 +01:00
|
|
|
('services', check_services),
|
2018-01-30 17:10:10 +01:00
|
|
|
])),
|
|
|
|
])
|
2018-01-30 17:08:35 +01:00
|
|
|
action = lambda: self.create_bot('test')
|
|
|
|
events = self.do_test(action, num_events=3)
|
2018-01-30 17:10:10 +01:00
|
|
|
error = get_bot_created_checker(bot_type="GENERIC_BOT")('events[1]', events[1])
|
|
|
|
self.assert_on_error(error)
|
|
|
|
|
|
|
|
action = lambda: self.create_bot('test_outgoing_webhook',
|
bots: Prevent bots from having duplicate full names.
Bots are not allowed to use the same name as
other users in the realm (either bot or human).
This is kind of a big commit, but I wanted to
combine the post/patch (aka add/edit) checks
into one commit, since it's a change in policy
that affects both codepaths.
A lot of the noise is in tests. We had good
coverage on the previous code, including some places
like event testing where we were expediently
not bothering to use different names for
different bots in some longer tests. And then
of course I test some new scenarios that are relevant
with the new policy.
There are two new functions:
check_bot_name_available:
very simple Django query
check_change_bot_full_name:
this diverges from the 3-line
check_change_full_name, where the latter
is still used for the "humans" use case
And then we just call those in appropriate places.
Note that there is still a loophole here
where you can get two bots with the same
name if you reactivate a bot named Fred
that was inactive when the second bot named
Fred was created. Also, we don't attempt
to fix historical data. So this commit
shouldn't be considered any kind of lockdown,
it's just meant to help people from
inadvertently creating two bots of the same
name where they don't intend to. For more
context, we are continuing to allow two
human users in the same realm to have the
same full name, and our code should generally
be tolerant of that possibility. (A good
example is our new mention syntax, which disambiguates
same-named people using ids.)
It's also worth noting that our web app client
doesn't try to scrub full_name from its payload in
situations where the user has actually only modified other
fields in the "Edit bot" UI. Starting here
we just handle this on the server, since it's
easy to fix there, and even if we fixed it in the web
app, there's no guarantee that other clients won't be
just as brute force. It wasn't exactly broken before,
but we'd needlessly write rows to audit tables.
Fixes #10509
2018-09-27 19:25:18 +02:00
|
|
|
full_name='Outgoing Webhook Bot',
|
2018-01-30 17:10:10 +01:00
|
|
|
payload_url=ujson.dumps('https://foo.bar.com'),
|
|
|
|
interface_type=Service.GENERIC,
|
|
|
|
bot_type=UserProfile.OUTGOING_WEBHOOK_BOT)
|
|
|
|
events = self.do_test(action, num_events=3)
|
|
|
|
# The third event is the second call of notify_created_bot, which contains additional
|
|
|
|
# data for services (in contrast to the first call).
|
|
|
|
error = get_bot_created_checker(bot_type="OUTGOING_WEBHOOK_BOT")('events[2]', events[2])
|
2014-02-26 00:12:14 +01:00
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2018-01-30 19:21:13 +01:00
|
|
|
action = lambda: self.create_bot('test_embedded',
|
bots: Prevent bots from having duplicate full names.
Bots are not allowed to use the same name as
other users in the realm (either bot or human).
This is kind of a big commit, but I wanted to
combine the post/patch (aka add/edit) checks
into one commit, since it's a change in policy
that affects both codepaths.
A lot of the noise is in tests. We had good
coverage on the previous code, including some places
like event testing where we were expediently
not bothering to use different names for
different bots in some longer tests. And then
of course I test some new scenarios that are relevant
with the new policy.
There are two new functions:
check_bot_name_available:
very simple Django query
check_change_bot_full_name:
this diverges from the 3-line
check_change_full_name, where the latter
is still used for the "humans" use case
And then we just call those in appropriate places.
Note that there is still a loophole here
where you can get two bots with the same
name if you reactivate a bot named Fred
that was inactive when the second bot named
Fred was created. Also, we don't attempt
to fix historical data. So this commit
shouldn't be considered any kind of lockdown,
it's just meant to help people from
inadvertently creating two bots of the same
name where they don't intend to. For more
context, we are continuing to allow two
human users in the same realm to have the
same full name, and our code should generally
be tolerant of that possibility. (A good
example is our new mention syntax, which disambiguates
same-named people using ids.)
It's also worth noting that our web app client
doesn't try to scrub full_name from its payload in
situations where the user has actually only modified other
fields in the "Edit bot" UI. Starting here
we just handle this on the server, since it's
easy to fix there, and even if we fixed it in the web
app, there's no guarantee that other clients won't be
just as brute force. It wasn't exactly broken before,
but we'd needlessly write rows to audit tables.
Fixes #10509
2018-09-27 19:25:18 +02:00
|
|
|
full_name='Embedded Bot',
|
2018-01-30 19:21:13 +01:00
|
|
|
service_name='helloworld',
|
|
|
|
config_data=ujson.dumps({'foo': 'bar'}),
|
|
|
|
bot_type=UserProfile.EMBEDDED_BOT)
|
|
|
|
events = self.do_test(action, num_events=3)
|
|
|
|
error = get_bot_created_checker(bot_type="EMBEDDED_BOT")('events[2]', events[2])
|
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_change_bot_full_name(self) -> None:
|
2018-01-30 17:08:35 +01:00
|
|
|
bot = self.create_bot('test')
|
2017-04-07 07:28:28 +02:00
|
|
|
action = lambda: do_change_full_name(bot, 'New Bot Name', self.user_profile)
|
2017-03-26 08:17:48 +02:00
|
|
|
events = self.do_test(action, num_events=2)
|
2014-03-06 16:50:28 +01:00
|
|
|
error = self.realm_bot_schema('full_name', check_string)('events[1]', events[1])
|
2014-02-26 19:55:29 +01:00
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_regenerate_bot_api_key(self) -> None:
|
2018-01-30 17:08:35 +01:00
|
|
|
bot = self.create_bot('test')
|
2017-04-06 12:27:58 +02:00
|
|
|
action = lambda: do_regenerate_api_key(bot, self.user_profile)
|
2014-02-26 20:17:19 +01:00
|
|
|
events = self.do_test(action)
|
2014-03-06 16:50:28 +01:00
|
|
|
error = self.realm_bot_schema('api_key', check_string)('events[0]', events[0])
|
2014-02-26 20:17:19 +01:00
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_change_bot_avatar_source(self) -> None:
|
2018-01-30 17:08:35 +01:00
|
|
|
bot = self.create_bot('test')
|
2017-02-21 19:35:17 +01:00
|
|
|
action = lambda: do_change_avatar_fields(bot, bot.AVATAR_FROM_USER)
|
2017-03-26 08:17:48 +02:00
|
|
|
events = self.do_test(action, num_events=2)
|
2014-03-06 16:50:28 +01:00
|
|
|
error = self.realm_bot_schema('avatar_url', check_string)('events[0]', events[0])
|
2017-03-26 08:17:48 +02:00
|
|
|
self.assertEqual(events[1]['type'], 'realm_user')
|
2014-02-26 21:05:10 +01:00
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_change_realm_icon_source(self) -> None:
|
2017-02-21 03:41:20 +01:00
|
|
|
realm = get_realm('zulip')
|
|
|
|
action = lambda: do_change_icon_source(realm, realm.ICON_FROM_GRAVATAR)
|
|
|
|
events = self.do_test(action, state_change_expected=False)
|
2017-04-20 17:31:41 +02:00
|
|
|
schema_checker = self.check_events_dict([
|
2017-02-26 20:35:23 +01:00
|
|
|
('type', equals('realm')),
|
|
|
|
('op', equals('update_dict')),
|
|
|
|
('property', equals('icon')),
|
2017-04-20 17:31:41 +02:00
|
|
|
('data', check_dict_only([
|
|
|
|
('icon_url', check_string),
|
|
|
|
('icon_source', check_string),
|
|
|
|
])),
|
2017-02-21 03:41:20 +01:00
|
|
|
])
|
|
|
|
error = schema_checker('events[0]', events[0])
|
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_change_bot_default_all_public_streams(self) -> None:
|
2018-01-30 17:08:35 +01:00
|
|
|
bot = self.create_bot('test')
|
2017-02-21 19:35:17 +01:00
|
|
|
action = lambda: do_change_default_all_public_streams(bot, True)
|
2014-02-26 21:15:31 +01:00
|
|
|
events = self.do_test(action)
|
2014-03-06 16:50:28 +01:00
|
|
|
error = self.realm_bot_schema('default_all_public_streams', check_bool)('events[0]', events[0])
|
2014-02-26 21:15:31 +01:00
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_change_bot_default_sending_stream(self) -> None:
|
2018-01-30 17:08:35 +01:00
|
|
|
bot = self.create_bot('test')
|
2017-02-21 19:35:17 +01:00
|
|
|
stream = get_stream("Rome", bot.realm)
|
2017-03-24 03:04:13 +01:00
|
|
|
|
2017-02-21 19:35:17 +01:00
|
|
|
action = lambda: do_change_default_sending_stream(bot, stream)
|
2014-02-26 21:23:18 +01:00
|
|
|
events = self.do_test(action)
|
2014-03-06 16:50:28 +01:00
|
|
|
error = self.realm_bot_schema('default_sending_stream', check_string)('events[0]', events[0])
|
2014-02-26 21:23:18 +01:00
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2017-03-24 03:04:13 +01:00
|
|
|
action = lambda: do_change_default_sending_stream(bot, None)
|
|
|
|
events = self.do_test(action)
|
|
|
|
error = self.realm_bot_schema('default_sending_stream', equals(None))('events[0]', events[0])
|
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_change_bot_default_events_register_stream(self) -> None:
|
2018-01-30 17:08:35 +01:00
|
|
|
bot = self.create_bot('test')
|
2017-02-21 19:35:17 +01:00
|
|
|
stream = get_stream("Rome", bot.realm)
|
2017-03-24 03:04:13 +01:00
|
|
|
|
2017-02-21 19:35:17 +01:00
|
|
|
action = lambda: do_change_default_events_register_stream(bot, stream)
|
2014-02-26 21:34:12 +01:00
|
|
|
events = self.do_test(action)
|
2014-03-06 16:50:28 +01:00
|
|
|
error = self.realm_bot_schema('default_events_register_stream', check_string)('events[0]', events[0])
|
2014-02-26 21:34:12 +01:00
|
|
|
self.assert_on_error(error)
|
2017-02-24 06:36:54 +01:00
|
|
|
|
2017-03-24 03:04:13 +01:00
|
|
|
action = lambda: do_change_default_events_register_stream(bot, None)
|
|
|
|
events = self.do_test(action)
|
|
|
|
error = self.realm_bot_schema('default_events_register_stream', equals(None))('events[0]', events[0])
|
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_change_bot_owner(self) -> None:
|
2017-04-20 17:31:41 +02:00
|
|
|
change_bot_owner_checker = self.check_events_dict([
|
2017-02-24 06:36:54 +01:00
|
|
|
('type', equals('realm_bot')),
|
|
|
|
('op', equals('update')),
|
2017-04-20 17:31:41 +02:00
|
|
|
('bot', check_dict_only([
|
2017-02-24 06:36:54 +01:00
|
|
|
('email', check_string),
|
|
|
|
('user_id', check_int),
|
|
|
|
('owner_id', check_int),
|
|
|
|
])),
|
|
|
|
])
|
2017-05-07 17:21:26 +02:00
|
|
|
self.user_profile = self.example_user('iago')
|
|
|
|
owner = self.example_user('hamlet')
|
2018-01-30 17:08:35 +01:00
|
|
|
bot = self.create_bot('test')
|
2017-03-31 17:27:08 +02:00
|
|
|
action = lambda: do_change_bot_owner(bot, owner, self.user_profile)
|
2017-02-24 06:36:54 +01:00
|
|
|
events = self.do_test(action)
|
|
|
|
error = change_bot_owner_checker('events[0]', events[0])
|
|
|
|
self.assert_on_error(error)
|
2014-02-26 21:34:12 +01:00
|
|
|
|
2018-03-06 22:32:03 +01:00
|
|
|
change_bot_owner_checker = self.check_events_dict([
|
|
|
|
('type', equals('realm_bot')),
|
|
|
|
('op', equals('delete')),
|
|
|
|
('bot', check_dict_only([
|
|
|
|
('email', check_string),
|
|
|
|
('user_id', check_int),
|
|
|
|
])),
|
|
|
|
])
|
|
|
|
self.user_profile = self.example_user('aaron')
|
|
|
|
owner = self.example_user('hamlet')
|
bots: Prevent bots from having duplicate full names.
Bots are not allowed to use the same name as
other users in the realm (either bot or human).
This is kind of a big commit, but I wanted to
combine the post/patch (aka add/edit) checks
into one commit, since it's a change in policy
that affects both codepaths.
A lot of the noise is in tests. We had good
coverage on the previous code, including some places
like event testing where we were expediently
not bothering to use different names for
different bots in some longer tests. And then
of course I test some new scenarios that are relevant
with the new policy.
There are two new functions:
check_bot_name_available:
very simple Django query
check_change_bot_full_name:
this diverges from the 3-line
check_change_full_name, where the latter
is still used for the "humans" use case
And then we just call those in appropriate places.
Note that there is still a loophole here
where you can get two bots with the same
name if you reactivate a bot named Fred
that was inactive when the second bot named
Fred was created. Also, we don't attempt
to fix historical data. So this commit
shouldn't be considered any kind of lockdown,
it's just meant to help people from
inadvertently creating two bots of the same
name where they don't intend to. For more
context, we are continuing to allow two
human users in the same realm to have the
same full name, and our code should generally
be tolerant of that possibility. (A good
example is our new mention syntax, which disambiguates
same-named people using ids.)
It's also worth noting that our web app client
doesn't try to scrub full_name from its payload in
situations where the user has actually only modified other
fields in the "Edit bot" UI. Starting here
we just handle this on the server, since it's
easy to fix there, and even if we fixed it in the web
app, there's no guarantee that other clients won't be
just as brute force. It wasn't exactly broken before,
but we'd needlessly write rows to audit tables.
Fixes #10509
2018-09-27 19:25:18 +02:00
|
|
|
bot = self.create_bot('test1', full_name='Test1 Testerson')
|
2018-03-06 22:32:03 +01:00
|
|
|
action = lambda: do_change_bot_owner(bot, owner, self.user_profile)
|
|
|
|
events = self.do_test(action)
|
|
|
|
error = change_bot_owner_checker('events[0]', events[0])
|
|
|
|
self.assert_on_error(error)
|
|
|
|
|
|
|
|
check_services = check_list(sub_validator=None, length=0)
|
|
|
|
change_bot_owner_checker = self.check_events_dict([
|
|
|
|
('type', equals('realm_bot')),
|
|
|
|
('op', equals('add')),
|
|
|
|
('bot', check_dict_only([
|
|
|
|
('email', check_string),
|
|
|
|
('user_id', check_int),
|
|
|
|
('bot_type', check_int),
|
|
|
|
('full_name', check_string),
|
|
|
|
('is_active', check_bool),
|
|
|
|
('api_key', check_string),
|
|
|
|
('default_sending_stream', check_none_or(check_string)),
|
|
|
|
('default_events_register_stream', check_none_or(check_string)),
|
|
|
|
('default_all_public_streams', check_bool),
|
|
|
|
('avatar_url', check_string),
|
|
|
|
('owner', check_string),
|
|
|
|
('services', check_services),
|
|
|
|
])),
|
|
|
|
])
|
|
|
|
previous_owner = self.example_user('aaron')
|
|
|
|
self.user_profile = self.example_user('hamlet')
|
bots: Prevent bots from having duplicate full names.
Bots are not allowed to use the same name as
other users in the realm (either bot or human).
This is kind of a big commit, but I wanted to
combine the post/patch (aka add/edit) checks
into one commit, since it's a change in policy
that affects both codepaths.
A lot of the noise is in tests. We had good
coverage on the previous code, including some places
like event testing where we were expediently
not bothering to use different names for
different bots in some longer tests. And then
of course I test some new scenarios that are relevant
with the new policy.
There are two new functions:
check_bot_name_available:
very simple Django query
check_change_bot_full_name:
this diverges from the 3-line
check_change_full_name, where the latter
is still used for the "humans" use case
And then we just call those in appropriate places.
Note that there is still a loophole here
where you can get two bots with the same
name if you reactivate a bot named Fred
that was inactive when the second bot named
Fred was created. Also, we don't attempt
to fix historical data. So this commit
shouldn't be considered any kind of lockdown,
it's just meant to help people from
inadvertently creating two bots of the same
name where they don't intend to. For more
context, we are continuing to allow two
human users in the same realm to have the
same full name, and our code should generally
be tolerant of that possibility. (A good
example is our new mention syntax, which disambiguates
same-named people using ids.)
It's also worth noting that our web app client
doesn't try to scrub full_name from its payload in
situations where the user has actually only modified other
fields in the "Edit bot" UI. Starting here
we just handle this on the server, since it's
easy to fix there, and even if we fixed it in the web
app, there's no guarantee that other clients won't be
just as brute force. It wasn't exactly broken before,
but we'd needlessly write rows to audit tables.
Fixes #10509
2018-09-27 19:25:18 +02:00
|
|
|
bot = self.create_test_bot('test2', previous_owner, full_name='Test2 Testerson')
|
2018-03-06 22:32:03 +01:00
|
|
|
action = lambda: do_change_bot_owner(bot, self.user_profile, previous_owner)
|
|
|
|
events = self.do_test(action)
|
|
|
|
error = change_bot_owner_checker('events[0]', events[0])
|
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2018-01-16 20:34:12 +01:00
|
|
|
def test_do_update_outgoing_webhook_service(self):
|
|
|
|
# type: () -> None
|
|
|
|
update_outgoing_webhook_service_checker = self.check_events_dict([
|
|
|
|
('type', equals('realm_bot')),
|
|
|
|
('op', equals('update')),
|
|
|
|
('bot', check_dict_only([
|
|
|
|
('email', check_string),
|
|
|
|
('user_id', check_int),
|
2018-03-16 16:46:51 +01:00
|
|
|
('services', check_list(check_dict_only([
|
2018-01-16 20:34:12 +01:00
|
|
|
('base_url', check_url),
|
|
|
|
('interface', check_int),
|
2018-05-30 11:09:35 +02:00
|
|
|
('token', check_string),
|
2018-01-16 20:34:12 +01:00
|
|
|
]))),
|
|
|
|
])),
|
|
|
|
])
|
|
|
|
self.user_profile = self.example_user('iago')
|
2018-01-30 17:08:35 +01:00
|
|
|
bot = self.create_test_bot('test', self.user_profile,
|
|
|
|
full_name='Test Bot',
|
|
|
|
bot_type=UserProfile.OUTGOING_WEBHOOK_BOT,
|
|
|
|
payload_url=ujson.dumps('http://hostname.domain2.com'),
|
|
|
|
interface_type=Service.GENERIC,
|
|
|
|
)
|
2018-01-16 20:34:12 +01:00
|
|
|
action = lambda: do_update_outgoing_webhook_service(bot, 2, 'http://hostname.domain2.com')
|
|
|
|
events = self.do_test(action)
|
|
|
|
error = update_outgoing_webhook_service_checker('events[0]', events[0])
|
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_do_deactivate_user(self) -> None:
|
2017-04-20 17:31:41 +02:00
|
|
|
bot_deactivate_checker = self.check_events_dict([
|
2014-02-26 22:27:51 +01:00
|
|
|
('type', equals('realm_bot')),
|
|
|
|
('op', equals('remove')),
|
2017-04-20 17:31:41 +02:00
|
|
|
('bot', check_dict_only([
|
2014-02-26 22:27:51 +01:00
|
|
|
('email', check_string),
|
|
|
|
('full_name', check_string),
|
2017-04-20 17:31:41 +02:00
|
|
|
('user_id', check_int),
|
2014-02-26 22:27:51 +01:00
|
|
|
])),
|
|
|
|
])
|
2018-01-30 17:08:35 +01:00
|
|
|
bot = self.create_bot('test')
|
2014-02-26 22:27:51 +01:00
|
|
|
action = lambda: do_deactivate_user(bot)
|
2017-03-26 08:17:48 +02:00
|
|
|
events = self.do_test(action, num_events=2)
|
2014-02-26 22:27:51 +01:00
|
|
|
error = bot_deactivate_checker('events[1]', events[1])
|
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_do_reactivate_user(self) -> None:
|
2017-04-20 17:31:41 +02:00
|
|
|
bot_reactivate_checker = self.check_events_dict([
|
2017-02-15 21:06:07 +01:00
|
|
|
('type', equals('realm_bot')),
|
|
|
|
('op', equals('add')),
|
2017-04-20 17:31:41 +02:00
|
|
|
('bot', check_dict_only([
|
2017-02-15 21:06:07 +01:00
|
|
|
('email', check_string),
|
|
|
|
('user_id', check_int),
|
2017-06-12 19:50:03 +02:00
|
|
|
('bot_type', check_int),
|
2017-02-15 21:06:07 +01:00
|
|
|
('full_name', check_string),
|
|
|
|
('is_active', check_bool),
|
|
|
|
('api_key', check_string),
|
|
|
|
('default_sending_stream', check_none_or(check_string)),
|
|
|
|
('default_events_register_stream', check_none_or(check_string)),
|
|
|
|
('default_all_public_streams', check_bool),
|
|
|
|
('avatar_url', check_string),
|
|
|
|
('owner', check_none_or(check_string)),
|
2018-03-16 16:46:51 +01:00
|
|
|
('services', check_list(check_dict_only([
|
2018-01-16 20:34:12 +01:00
|
|
|
('base_url', check_url),
|
|
|
|
('interface', check_int),
|
|
|
|
]))),
|
2017-02-15 21:06:07 +01:00
|
|
|
])),
|
|
|
|
])
|
2018-01-30 17:08:35 +01:00
|
|
|
bot = self.create_bot('test')
|
2017-02-15 21:06:07 +01:00
|
|
|
do_deactivate_user(bot)
|
|
|
|
action = lambda: do_reactivate_user(bot)
|
2017-03-26 08:17:48 +02:00
|
|
|
events = self.do_test(action, num_events=2)
|
2017-02-15 21:06:07 +01:00
|
|
|
error = bot_reactivate_checker('events[1]', events[1])
|
|
|
|
self.assert_on_error(error)
|
2017-01-24 01:48:35 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_do_mark_hotspot_as_read(self) -> None:
|
2017-10-12 17:13:02 +02:00
|
|
|
self.user_profile.tutorial_status = UserProfile.TUTORIAL_WAITING
|
|
|
|
self.user_profile.save(update_fields=['tutorial_status'])
|
|
|
|
|
2017-04-20 17:31:41 +02:00
|
|
|
schema_checker = self.check_events_dict([
|
2017-01-24 01:48:35 +01:00
|
|
|
('type', equals('hotspots')),
|
2017-04-15 05:50:59 +02:00
|
|
|
('hotspots', check_list(check_dict_only([
|
|
|
|
('name', check_string),
|
2017-07-14 03:20:27 +02:00
|
|
|
('title', check_string),
|
2017-04-15 05:50:59 +02:00
|
|
|
('description', check_string),
|
2017-08-31 05:13:37 +02:00
|
|
|
('delay', check_float),
|
2017-04-15 05:50:59 +02:00
|
|
|
]))),
|
2017-01-24 01:48:35 +01:00
|
|
|
])
|
2017-08-30 02:13:04 +02:00
|
|
|
events = self.do_test(lambda: do_mark_hotspot_as_read(self.user_profile, 'intro_reply'))
|
2017-01-24 01:48:35 +01:00
|
|
|
error = schema_checker('events[0]', events[0])
|
|
|
|
self.assert_on_error(error)
|
2017-02-15 21:06:07 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_rename_stream(self) -> None:
|
2016-10-21 22:13:43 +02:00
|
|
|
stream = self.make_stream('old_name')
|
2014-02-02 15:30:33 +01:00
|
|
|
new_name = u'stream with a brand new name'
|
2017-08-25 06:01:29 +02:00
|
|
|
self.subscribe(self.user_profile, stream.name)
|
2014-02-04 20:52:02 +01:00
|
|
|
|
2017-01-30 04:05:39 +01:00
|
|
|
action = lambda: do_rename_stream(stream, new_name)
|
2017-03-26 08:17:48 +02:00
|
|
|
events = self.do_test(action, num_events=2)
|
2014-02-04 20:52:02 +01:00
|
|
|
|
2017-04-20 17:31:41 +02:00
|
|
|
schema_checker = self.check_events_dict([
|
2014-02-04 20:52:02 +01:00
|
|
|
('type', equals('stream')),
|
|
|
|
('op', equals('update')),
|
|
|
|
('property', equals('email_address')),
|
|
|
|
('value', check_string),
|
2017-03-05 01:50:25 +01:00
|
|
|
('stream_id', check_int),
|
2014-02-04 20:52:02 +01:00
|
|
|
('name', equals('old_name')),
|
|
|
|
])
|
|
|
|
error = schema_checker('events[0]', events[0])
|
|
|
|
self.assert_on_error(error)
|
2017-04-20 17:31:41 +02:00
|
|
|
schema_checker = self.check_events_dict([
|
2014-02-04 20:52:02 +01:00
|
|
|
('type', equals('stream')),
|
|
|
|
('op', equals('update')),
|
|
|
|
('property', equals('name')),
|
|
|
|
('value', equals(new_name)),
|
|
|
|
('name', equals('old_name')),
|
2017-04-20 17:31:41 +02:00
|
|
|
('stream_id', check_int),
|
2014-02-04 20:52:02 +01:00
|
|
|
])
|
|
|
|
error = schema_checker('events[1]', events[1])
|
|
|
|
self.assert_on_error(error)
|
2014-01-31 23:23:39 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_deactivate_stream_neversubscribed(self) -> None:
|
2016-10-21 22:13:43 +02:00
|
|
|
stream = self.make_stream('old_name')
|
2016-07-12 23:57:16 +02:00
|
|
|
|
|
|
|
action = lambda: do_deactivate_stream(stream)
|
|
|
|
events = self.do_test(action)
|
|
|
|
|
2017-04-20 17:31:41 +02:00
|
|
|
schema_checker = self.check_events_dict([
|
2016-07-12 23:57:16 +02:00
|
|
|
('type', equals('stream')),
|
|
|
|
('op', equals('delete')),
|
|
|
|
('streams', check_list(check_dict([]))),
|
|
|
|
])
|
|
|
|
error = schema_checker('events[0]', events[0])
|
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_subscribe_other_user_never_subscribed(self) -> None:
|
2017-08-25 06:01:29 +02:00
|
|
|
action = lambda: self.subscribe(self.example_user("othello"), u"test_stream")
|
2017-03-26 08:17:48 +02:00
|
|
|
events = self.do_test(action, num_events=2)
|
2017-04-20 17:31:41 +02:00
|
|
|
peer_add_schema_checker = self.check_events_dict([
|
2016-07-12 23:57:16 +02:00
|
|
|
('type', equals('subscription')),
|
|
|
|
('op', equals('peer_add')),
|
2016-10-31 20:18:32 +01:00
|
|
|
('user_id', check_int),
|
2016-07-12 23:57:16 +02:00
|
|
|
('subscriptions', check_list(check_string)),
|
|
|
|
])
|
2017-03-24 05:49:23 +01:00
|
|
|
error = peer_add_schema_checker('events[1]', events[1])
|
2016-07-12 23:57:16 +02:00
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2017-10-28 00:46:59 +02:00
|
|
|
@slow("Actually several tests combined together")
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_subscribe_events(self) -> None:
|
2017-02-21 19:35:17 +01:00
|
|
|
self.do_test_subscribe_events(include_subscribers=True)
|
2017-02-20 08:30:09 +01:00
|
|
|
|
2017-10-28 00:46:59 +02:00
|
|
|
@slow("Actually several tests combined together")
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_subscribe_events_no_include_subscribers(self) -> None:
|
2017-02-21 19:35:17 +01:00
|
|
|
self.do_test_subscribe_events(include_subscribers=False)
|
2017-02-20 08:30:09 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def do_test_subscribe_events(self, include_subscribers: bool) -> None:
    """Exercise the full lifecycle of subscription-related events
    (add, peer_add, remove, peer_remove, vacate/resubscribe, stream
    description update, and invite-only stream creation), validating
    the schema of each event.

    `include_subscribers` controls whether the `subscribers` field is
    expected in subscription payloads.
    """
    subscription_fields = [
        ('color', check_string),
        ('description', check_string),
        ('email_address', check_string),
        ('invite_only', check_bool),
        ('is_announcement_only', check_bool),
        ('in_home_view', check_bool),
        ('name', check_string),
        ('audible_notifications', check_bool),
        ('email_notifications', check_bool),
        ('desktop_notifications', check_bool),
        ('push_notifications', check_bool),
        ('stream_id', check_int),
        ('history_public_to_subscribers', check_bool),
        ('pin_to_top', check_bool),
        ('stream_weekly_traffic', check_none_or(check_int)),
        ('is_old_stream', check_bool),
    ]
    if include_subscribers:
        subscription_fields.append(('subscribers', check_list(check_int)))  # type: ignore
    subscription_schema_checker = check_list(
        check_dict_only(subscription_fields),
    )
    stream_create_schema_checker = self.check_events_dict([
        ('type', equals('stream')),
        ('op', equals('create')),
        ('streams', check_list(check_dict_only([
            ('name', check_string),
            ('stream_id', check_int),
            ('invite_only', check_bool),
            ('description', check_string),
        ]))),
    ])
    add_schema_checker = self.check_events_dict([
        ('type', equals('subscription')),
        ('op', equals('add')),
        ('subscriptions', subscription_schema_checker),
    ])
    remove_schema_checker = self.check_events_dict([
        ('type', equals('subscription')),
        ('op', equals('remove')),
        ('subscriptions', check_list(
            check_dict_only([
                ('name', equals('test_stream')),
                ('stream_id', check_int),
            ]),
        )),
    ])
    peer_add_schema_checker = self.check_events_dict([
        ('type', equals('subscription')),
        ('op', equals('peer_add')),
        ('user_id', check_int),
        ('subscriptions', check_list(check_string)),
    ])
    peer_remove_schema_checker = self.check_events_dict([
        ('type', equals('subscription')),
        ('op', equals('peer_remove')),
        ('user_id', check_int),
        ('subscriptions', check_list(check_string)),
    ])
    stream_update_schema_checker = self.check_events_dict([
        ('type', equals('stream')),
        ('op', equals('update')),
        ('property', equals('description')),
        ('value', check_string),
        ('stream_id', check_int),
        ('name', check_string),
    ])

    # Subscribe to a totally new stream, so it's just Hamlet on it
    action = lambda: self.subscribe(self.example_user("hamlet"), "test_stream")  # type: Callable[[], Any]
    events = self.do_test(action, event_types=["subscription", "realm_user"],
                          include_subscribers=include_subscribers)
    error = add_schema_checker('events[0]', events[0])
    self.assert_on_error(error)

    # Add another user to that totally new stream
    action = lambda: self.subscribe(self.example_user("othello"), "test_stream")
    events = self.do_test(action,
                          include_subscribers=include_subscribers,
                          state_change_expected=include_subscribers,
                          )
    error = peer_add_schema_checker('events[0]', events[0])
    self.assert_on_error(error)

    stream = get_stream("test_stream", self.user_profile.realm)

    # Now remove the first user, to test the normal unsubscribe flow
    action = lambda: bulk_remove_subscriptions(
        [self.example_user('othello')],
        [stream],
        get_client("website"))
    events = self.do_test(action,
                          include_subscribers=include_subscribers,
                          state_change_expected=include_subscribers,
                          )
    error = peer_remove_schema_checker('events[0]', events[0])
    self.assert_on_error(error)

    # Now remove the second user, to test the 'vacate' event flow
    action = lambda: bulk_remove_subscriptions(
        [self.example_user('hamlet')],
        [stream],
        get_client("website"))
    events = self.do_test(action,
                          include_subscribers=include_subscribers,
                          num_events=3)
    error = remove_schema_checker('events[0]', events[0])
    self.assert_on_error(error)

    # Now resubscribe a user, to make sure that works on a vacated stream
    action = lambda: self.subscribe(self.example_user("hamlet"), "test_stream")
    events = self.do_test(action,
                          include_subscribers=include_subscribers,
                          num_events=2)
    error = add_schema_checker('events[1]', events[1])
    self.assert_on_error(error)

    action = lambda: do_change_stream_description(stream, u'new description')
    events = self.do_test(action,
                          include_subscribers=include_subscribers)
    error = stream_update_schema_checker('events[0]', events[0])
    self.assert_on_error(error)

    # Subscribe to a totally new invite-only stream, so it's just Hamlet on it
    stream = self.make_stream("private", get_realm("zulip"), invite_only=True)
    user_profile = self.example_user('hamlet')
    action = lambda: bulk_add_subscriptions([stream], [user_profile])
    events = self.do_test(action, include_subscribers=include_subscribers,
                          num_events=2)
    error = stream_create_schema_checker('events[0]', events[0])
    # Bug fix: the original discarded this result by immediately
    # reassigning `error`, so stream-create schema violations were
    # never reported.  Check it before validating the second event.
    self.assert_on_error(error)
    error = add_schema_checker('events[1]', events[1])
    self.assert_on_error(error)
2018-04-02 00:21:21 +02:00
|
|
|
def test_do_delete_message_stream(self) -> None:
    """Deleting a stream message should emit a delete_message event
    that carries the stream/topic coordinates of the message."""
    delete_checker = self.check_events_dict([
        ('type', equals('delete_message')),
        ('message_id', check_int),
        ('sender', check_string),
        ('message_type', equals("stream")),
        ('stream_id', check_int),
        ('topic', check_string),
    ])
    message_id = self.send_stream_message("hamlet@zulip.com", "Verona")
    message = Message.objects.get(id=message_id)
    events = self.do_test(
        lambda: do_delete_message(self.user_profile, message),
        state_change_expected=True,
    )
    self.assert_on_error(delete_checker('events[0]', events[0]))
2018-04-02 00:21:21 +02:00
|
|
|
def test_do_delete_message_personal(self) -> None:
    """Deleting a private message should emit a delete_message event
    identifying the recipient user(s)."""
    delete_checker = self.check_events_dict([
        ('type', equals('delete_message')),
        ('message_id', check_int),
        ('sender', check_string),
        ('message_type', equals("private")),
        ('recipient_user_ids', check_int),
    ])
    message_id = self.send_personal_message(
        self.example_email("cordelia"),
        self.user_profile.email,
        "hello",
    )
    message = Message.objects.get(id=message_id)
    events = self.do_test(
        lambda: do_delete_message(self.user_profile, message),
        state_change_expected=True,
    )
    self.assert_on_error(delete_checker('events[0]', events[0]))
2017-11-05 10:51:25 +01:00
|
|
|
def test_do_delete_message_no_max_id(self) -> None:
    """After deleting a user's only message, fetch_initial_state_data
    should report max_message_id as -1 (no messages).

    Bug fix: the original body assigned ``user_profile`` to
    ``self.example_user('aaron')`` and then immediately shadowed it
    with ``self.example_user('hamlet')`` — the first assignment was
    dead code left over from a copy/paste and has been removed.
    """
    # Delete all historical messages for this user
    user_profile = self.example_user('hamlet')
    UserMessage.objects.filter(user_profile=user_profile).delete()
    msg_id = self.send_stream_message("hamlet@zulip.com", "Verona")
    message = Message.objects.get(id=msg_id)
    self.do_test(
        lambda: do_delete_message(self.user_profile, message),
        state_change_expected=True,
    )
    result = fetch_initial_state_data(user_profile, None, "", client_gravatar=False)
    self.assertEqual(result['max_message_id'], -1)
2018-05-04 22:57:36 +02:00
|
|
|
def test_add_attachment(self) -> None:
    """Exercise the attachment event lifecycle: upload (add event),
    reference in a message (update event), and deletion (remove
    event), validating the schema of each.

    Note that ``data['uri']`` is a mutable closure cell: ``do_upload``
    stores the uploaded file's URI there so the later message body can
    reference it.
    """
    schema_checker = self.check_events_dict([
        ('type', equals('attachment')),
        ('op', equals('add')),
        ('attachment', check_dict_only([
            ('id', check_int),
            ('name', check_string),
            ('size', check_int),
            ('path_id', check_string),
            ('create_time', check_float),
            # NOTE(review): 'name' inside `messages` is validated as a
            # float — presumably a timestamp despite the key name;
            # confirm against the event serializer.
            ('messages', check_list(check_dict_only([
                ('id', check_int),
                ('name', check_float),
            ]))),
        ])),
    ])

    self.login(self.example_email("hamlet"))
    fp = StringIO("zulip!")
    fp.name = "zulip.txt"
    data = {'uri': None}

    def do_upload() -> None:
        # Upload the file and record its URI for later use.
        result = self.client_post("/json/user_uploads", {'file': fp})

        self.assert_json_success(result)
        self.assertIn("uri", result.json())
        uri = result.json()["uri"]
        base = '/user_uploads/'
        self.assertEqual(base, uri[:len(base)])
        data['uri'] = uri

    events = self.do_test(
        lambda: do_upload(),
        num_events=1, state_change_expected=False)
    error = schema_checker('events[0]', events[0])
    self.assert_on_error(error)

    # Verify that the DB has the attachment marked as unclaimed
    entry = Attachment.objects.get(file_name='zulip.txt')
    self.assertEqual(entry.is_claimed(), False)

    # Now we send an actual message using this attachment.
    schema_checker = self.check_events_dict([
        ('type', equals('attachment')),
        ('op', equals('update')),
        ('attachment', check_dict_only([
            ('id', check_int),
            ('name', check_string),
            ('size', check_int),
            ('path_id', check_string),
            ('create_time', check_float),
            ('messages', check_list(check_dict_only([
                ('id', check_int),
                ('name', check_float),
            ]))),
        ])),
    ])

    self.subscribe(self.example_user("hamlet"), "Denmark")
    body = "First message ...[zulip.txt](http://localhost:9991" + data['uri'] + ")"
    events = self.do_test(
        lambda: self.send_stream_message(self.example_email("hamlet"), "Denmark", body, "test"),
        num_events=2)
    error = schema_checker('events[0]', events[0])
    self.assert_on_error(error)

    # Now remove the attachment
    schema_checker = self.check_events_dict([
        ('type', equals('attachment')),
        ('op', equals('remove')),
        ('attachment', check_dict_only([
            ('id', check_int),
        ])),
    ])

    events = self.do_test(
        lambda: self.client_delete("/json/attachments/%s" % (entry.id,)),
        num_events=1, state_change_expected=False)
    error = schema_checker('events[0]', events[0])
    self.assert_on_error(error)
2016-08-23 02:08:42 +02:00
|
|
|
class FetchInitialStateDataTest(ZulipTestCase):
    def test_realm_bots_non_admin(self) -> None:
        """Non-admin users don't have access to all bots."""
        cordelia = self.example_user('cordelia')
        self.assertFalse(cordelia.is_realm_admin)
        result = fetch_initial_state_data(cordelia, None, "", client_gravatar=False)
        self.assert_length(result['realm_bots'], 0)

        # additionally the API key for a random bot is not present in the data
        api_key = get_api_key(self.notification_bot())
        self.assertNotIn(api_key, str(result))

    def test_realm_bots_admin(self) -> None:
        """Admin users have access to all bots in the realm_bots field."""
        hamlet = self.example_user('hamlet')
        do_change_is_admin(hamlet, True)
        self.assertTrue(hamlet.is_realm_admin)
        result = fetch_initial_state_data(hamlet, None, "", client_gravatar=False)
        self.assertTrue(len(result['realm_bots']) > 5)

    def test_max_message_id_with_no_history(self) -> None:
        """A user with no message history gets max_message_id == -1."""
        aaron = self.example_user('aaron')
        # Delete all historical messages for this user
        UserMessage.objects.filter(user_profile=aaron).delete()
        result = fetch_initial_state_data(aaron, None, "", client_gravatar=False)
        self.assertEqual(result['max_message_id'], -1)
2017-11-10 15:26:30 +01:00
|
|
|
class GetUnreadMsgsTest(ZulipTestCase):
    """Tests for the raw and aggregated unread-message data structures
    served to clients at registration time."""

    def mute_stream(self, user_profile: UserProfile, stream: Stream) -> None:
        # Mute a stream by flipping the subscription's in_home_view
        # flag directly in the database.
        recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
        subscription = Subscription.objects.get(
            user_profile=user_profile,
            recipient=recipient
        )
        subscription.in_home_view = False
        subscription.save()

    def mute_topic(self, user_profile: UserProfile, stream_name: str,
                   topic_name: str) -> None:
        # Mute a single topic via the topic-mute helper.
        realm = user_profile.realm
        stream = get_stream(stream_name, realm)
        recipient = get_stream_recipient(stream.id)

        add_topic_mute(
            user_profile=user_profile,
            stream_id=stream.id,
            recipient_id=recipient.id,
            topic_name=topic_name,
        )

    def test_raw_unread_stream(self) -> None:
        """Raw unread data should index every unread stream message,
        while `unmuted_stream_msgs` excludes muted streams and muted
        topics."""
        cordelia = self.example_user('cordelia')
        hamlet = self.example_user('hamlet')
        realm = hamlet.realm

        for stream_name in ['social', 'devel', 'test here']:
            self.subscribe(hamlet, stream_name)
            self.subscribe(cordelia, stream_name)

        all_message_ids = set()  # type: Set[int]
        message_ids = dict()

        # (stream, topic) pairs; three messages are sent to each.
        tups = [
            ('social', 'lunch'),
            ('test here', 'bla'),
            ('devel', 'python'),
            ('devel', 'ruby'),
        ]

        for stream_name, topic_name in tups:
            message_ids[topic_name] = [
                self.send_stream_message(
                    sender_email=cordelia.email,
                    stream_name=stream_name,
                    topic_name=topic_name,
                ) for i in range(3)
            ]
            all_message_ids |= set(message_ids[topic_name])

        self.assertEqual(len(all_message_ids), 12)  # sanity check on test setup

        self.mute_stream(
            user_profile=hamlet,
            stream=get_stream('test here', realm),
        )

        self.mute_topic(
            user_profile=hamlet,
            stream_name='devel',
            topic_name='ruby',
        )

        raw_unread_data = get_raw_unread_data(
            user_profile=hamlet,
        )

        stream_dict = raw_unread_data['stream_dict']

        # stream_dict includes muted messages too; only
        # unmuted_stream_msgs filters them out.
        self.assertEqual(
            set(stream_dict.keys()),
            all_message_ids,
        )

        self.assertEqual(
            raw_unread_data['unmuted_stream_msgs'],
            set(message_ids['python']) | set(message_ids['lunch']),
        )

        self.assertEqual(
            stream_dict[message_ids['lunch'][0]],
            dict(
                sender_id=cordelia.id,
                stream_id=get_stream('social', realm).id,
                topic='lunch',
            )
        )

    def test_raw_unread_huddle(self) -> None:
        """Raw unread data should track huddle (group PM) messages keyed
        by message id, with a sorted comma-joined user-id string."""
        cordelia = self.example_user('cordelia')
        othello = self.example_user('othello')
        hamlet = self.example_user('hamlet')
        prospero = self.example_user('prospero')

        huddle1_message_ids = [
            self.send_huddle_message(
                cordelia.email,
                [hamlet.email, othello.email]
            )
            for i in range(3)
        ]

        huddle2_message_ids = [
            self.send_huddle_message(
                cordelia.email,
                [hamlet.email, prospero.email]
            )
            for i in range(3)
        ]

        raw_unread_data = get_raw_unread_data(
            user_profile=hamlet,
        )

        huddle_dict = raw_unread_data['huddle_dict']

        self.assertEqual(
            set(huddle_dict.keys()),
            set(huddle1_message_ids) | set(huddle2_message_ids)
        )

        # Huddles are identified by the sorted ids of all participants.
        huddle_string = ','.join(
            str(uid)
            for uid in sorted([cordelia.id, hamlet.id, othello.id])
        )

        self.assertEqual(
            huddle_dict[huddle1_message_ids[0]],
            dict(user_ids_string=huddle_string),
        )

    def test_raw_unread_personal(self) -> None:
        """Raw unread data should track 1:1 PMs keyed by message id,
        recording the sender's id."""
        cordelia = self.example_user('cordelia')
        othello = self.example_user('othello')
        hamlet = self.example_user('hamlet')

        cordelia_pm_message_ids = [
            self.send_personal_message(cordelia.email, hamlet.email)
            for i in range(3)
        ]

        othello_pm_message_ids = [
            self.send_personal_message(othello.email, hamlet.email)
            for i in range(3)
        ]

        raw_unread_data = get_raw_unread_data(
            user_profile=hamlet,
        )

        pm_dict = raw_unread_data['pm_dict']

        self.assertEqual(
            set(pm_dict.keys()),
            set(cordelia_pm_message_ids) | set(othello_pm_message_ids)
        )

        self.assertEqual(
            pm_dict[cordelia_pm_message_ids[0]],
            dict(sender_id=cordelia.id),
        )

    def test_unread_msgs(self) -> None:
        """End-to-end check of the aggregated unread-messages payload:
        PMs, stream messages (including muted streams/topics), huddles,
        and mentions."""
        cordelia = self.example_user('cordelia')
        sender_id = cordelia.id
        sender_email = cordelia.email
        user_profile = self.example_user('hamlet')
        othello = self.example_user('othello')

        # our tests rely on order
        assert(sender_email < user_profile.email)
        assert(user_profile.email < othello.email)

        pm1_message_id = self.send_personal_message(sender_email, user_profile.email, "hello1")
        pm2_message_id = self.send_personal_message(sender_email, user_profile.email, "hello2")

        muted_stream = self.subscribe(user_profile, 'Muted Stream')
        self.mute_stream(user_profile, muted_stream)
        self.mute_topic(user_profile, 'Denmark', 'muted-topic')

        stream_message_id = self.send_stream_message(sender_email, "Denmark", "hello")
        muted_stream_message_id = self.send_stream_message(sender_email, "Muted Stream", "hello")
        muted_topic_message_id = self.send_stream_message(
            sender_email,
            "Denmark",
            topic_name="muted-topic",
            content="hello",
        )

        huddle_message_id = self.send_huddle_message(
            sender_email,
            [user_profile.email, othello.email],
            'hello3',
        )

        def get_unread_data() -> UnreadMessagesResult:
            # Recompute the aggregated payload from scratch.
            raw_unread_data = get_raw_unread_data(user_profile)
            aggregated_data = aggregate_unread_data(raw_unread_data)
            return aggregated_data

        result = get_unread_data()

        # The count here reflects the count of unread messages that we will
        # report to users in the bankruptcy dialog, and for now it excludes unread messages
        # from muted streams, but it doesn't exclude unread messages from muted topics yet.
        self.assertEqual(result['count'], 4)

        unread_pm = result['pms'][0]
        self.assertEqual(unread_pm['sender_id'], sender_id)
        self.assertEqual(unread_pm['unread_message_ids'], [pm1_message_id, pm2_message_id])
        self.assertTrue('sender_ids' not in unread_pm)

        unread_stream = result['streams'][0]
        self.assertEqual(unread_stream['stream_id'], get_stream('Denmark', user_profile.realm).id)
        self.assertEqual(unread_stream['topic'], 'muted-topic')
        self.assertEqual(unread_stream['unread_message_ids'], [muted_topic_message_id])
        self.assertEqual(unread_stream['sender_ids'], [sender_id])

        unread_stream = result['streams'][1]
        self.assertEqual(unread_stream['stream_id'], get_stream('Denmark', user_profile.realm).id)
        self.assertEqual(unread_stream['topic'], 'test')
        self.assertEqual(unread_stream['unread_message_ids'], [stream_message_id])
        self.assertEqual(unread_stream['sender_ids'], [sender_id])

        unread_stream = result['streams'][2]
        self.assertEqual(unread_stream['stream_id'], get_stream('Muted Stream', user_profile.realm).id)
        self.assertEqual(unread_stream['topic'], 'test')
        self.assertEqual(unread_stream['unread_message_ids'], [muted_stream_message_id])
        self.assertEqual(unread_stream['sender_ids'], [sender_id])

        huddle_string = ','.join(str(uid) for uid in sorted([sender_id, user_profile.id, othello.id]))

        unread_huddle = result['huddles'][0]
        self.assertEqual(unread_huddle['user_ids_string'], huddle_string)
        self.assertEqual(unread_huddle['unread_message_ids'], [huddle_message_id])
        self.assertTrue('sender_ids' not in unread_huddle)

        self.assertEqual(result['mentions'], [])

        # Flag one message as a mention and verify it shows up.
        um = UserMessage.objects.get(
            user_profile_id=user_profile.id,
            message_id=stream_message_id
        )
        um.flags |= UserMessage.flags.mentioned
        um.save()

        result = get_unread_data()
        self.assertEqual(result['mentions'], [stream_message_id])
2014-01-31 23:23:39 +01:00
|
|
|
class EventQueueTest(TestCase):
    """Unit tests for EventQueue's id assignment and its collapsing of
    redundant events (pointer updates, message-flag updates)."""

    def test_one_event(self) -> None:
        # A single pushed event is retained verbatim with id 0.
        queue = EventQueue("1")
        queue.push({"type": "pointer",
                    "pointer": 1,
                    "timestamp": "1"})
        self.assertFalse(queue.empty())
        self.assertEqual(queue.contents(),
                         [{'id': 0,
                           'type': 'pointer',
                           "pointer": 1,
                           "timestamp": "1"}])

    def test_event_collapsing(self) -> None:
        # Consecutive pointer events collapse to the most recent one,
        # which keeps the id of the last pushed event.
        queue = EventQueue("1")
        for pointer_val in range(1, 10):
            queue.push({"type": "pointer",
                        "pointer": pointer_val,
                        "timestamp": str(pointer_val)})
        self.assertEqual(queue.contents(),
                         [{'id': 8,
                           'type': 'pointer',
                           "pointer": 9,
                           "timestamp": "9"}])

        # Non-collapsible events ("unknown", the final "restart") act
        # as barriers; only the trailing restart survives, and pointer
        # runs on each side collapse independently.
        queue = EventQueue("2")
        for pointer_val in range(1, 10):
            queue.push({"type": "pointer",
                        "pointer": pointer_val,
                        "timestamp": str(pointer_val)})
        queue.push({"type": "unknown"})
        queue.push({"type": "restart", "server_generation": "1"})
        for pointer_val in range(11, 20):
            queue.push({"type": "pointer",
                        "pointer": pointer_val,
                        "timestamp": str(pointer_val)})
        queue.push({"type": "restart", "server_generation": "2"})
        self.assertEqual(queue.contents(),
                         [{"type": "unknown",
                           "id": 9},
                          {'id': 19,
                           'type': 'pointer',
                           "pointer": 19,
                           "timestamp": "19"},
                          {"id": 20,
                           "type": "restart",
                           "server_generation": "2"}])
        # Pointer events pushed after the restart collapse among
        # themselves but do not merge across the restart barrier.
        for pointer_val in range(21, 23):
            queue.push({"type": "pointer",
                        "pointer": pointer_val,
                        "timestamp": str(pointer_val)})
        self.assertEqual(queue.contents(),
                         [{"type": "unknown",
                           "id": 9},
                          {'id': 19,
                           'type': 'pointer',
                           "pointer": 19,
                           "timestamp": "19"},
                          {"id": 20,
                           "type": "restart",
                           "server_generation": "2"},
                          {'id': 22,
                           'type': 'pointer',
                           "pointer": 22,
                           "timestamp": "22"},
                          ])

    def test_flag_add_collapsing(self) -> None:
        # Two update_message_flags/add events for the same flag merge
        # their message-id lists into one event.
        queue = EventQueue("1")
        queue.push({"type": "update_message_flags",
                    "flag": "read",
                    "operation": "add",
                    "all": False,
                    "messages": [1, 2, 3, 4],
                    "timestamp": "1"})
        queue.push({"type": "update_message_flags",
                    "flag": "read",
                    "all": False,
                    "operation": "add",
                    "messages": [5, 6],
                    "timestamp": "1"})
        self.assertEqual(queue.contents(),
                         [{'id': 1,
                           'type': 'update_message_flags',
                           "all": False,
                           "flag": "read",
                           "operation": "add",
                           "messages": [1, 2, 3, 4, 5, 6],
                           "timestamp": "1"}])

    def test_flag_remove_collapsing(self) -> None:
        # Same merging behavior applies to the "remove" operation.
        queue = EventQueue("1")
        queue.push({"type": "update_message_flags",
                    "flag": "collapsed",
                    "operation": "remove",
                    "all": False,
                    "messages": [1, 2, 3, 4],
                    "timestamp": "1"})
        queue.push({"type": "update_message_flags",
                    "flag": "collapsed",
                    "all": False,
                    "operation": "remove",
                    "messages": [5, 6],
                    "timestamp": "1"})
        self.assertEqual(queue.contents(),
                         [{'id': 1,
                           'type': 'update_message_flags',
                           "all": False,
                           "flag": "collapsed",
                           "operation": "remove",
                           "messages": [1, 2, 3, 4, 5, 6],
                           "timestamp": "1"}])

    def test_collapse_event(self) -> None:
        # A pointer event followed by an unrelated event type: nothing
        # collapses; both remain with their own ids.
        queue = EventQueue("1")
        queue.push({"type": "pointer",
                    "pointer": 1,
                    "timestamp": "1"})
        queue.push({"type": "unknown",
                    "timestamp": "1"})
        self.assertEqual(queue.contents(),
                         [{'id': 0,
                           'type': 'pointer',
                           "pointer": 1,
                           "timestamp": "1"},
                          {'id': 1,
                           'type': 'unknown',
                           "timestamp": "1"}])
2017-10-26 22:10:52 +02:00
|
|
|
class ClientDescriptorsTest(ZulipTestCase):
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_get_client_info_for_all_public_streams(self) -> None:
    """A client registered with all_public_streams=True should be
    matched by get_client_info_for_message_event even with an empty
    explicit user list, and is_sender should reflect whether the
    event's sender_queue_id matches the client's queue."""
    hamlet = self.example_user('hamlet')
    realm = hamlet.realm

    queue_data = dict(
        all_public_streams=True,
        apply_markdown=True,
        client_gravatar=True,
        client_type_name='website',
        event_types=['message'],
        last_connection_time=time.time(),
        queue_timeout=0,
        realm_id=realm.id,
        user_profile_id=hamlet.id,
    )

    client = allocate_client_descriptor(queue_data)

    message_event = dict(
        realm_id=realm.id,
        stream_name='whatever',
    )

    # No explicit users, but the all_public_streams client still
    # receives the event.
    client_info = get_client_info_for_message_event(
        message_event,
        users=[],
    )

    self.assertEqual(len(client_info), 1)

    dct = client_info[client.event_queue.id]
    self.assertEqual(dct['client'].apply_markdown, True)
    self.assertEqual(dct['client'].client_gravatar, True)
    self.assertEqual(dct['client'].user_profile_id, hamlet.id)
    self.assertEqual(dct['flags'], [])
    self.assertEqual(dct['is_sender'], False)

    # When the event names this client's queue as the sender's queue,
    # is_sender flips to True.
    message_event = dict(
        realm_id=realm.id,
        stream_name='whatever',
        sender_queue_id=client.event_queue.id,
    )

    client_info = get_client_info_for_message_event(
        message_event,
        users=[],
    )
    dct = client_info[client.event_queue.id]
    self.assertEqual(dct['is_sender'], True)
2017-11-05 10:51:25 +01:00
|
|
|
def test_get_client_info_for_normal_users(self) -> None:
|
2017-10-26 22:10:52 +02:00
|
|
|
hamlet = self.example_user('hamlet')
|
|
|
|
cordelia = self.example_user('cordelia')
|
|
|
|
realm = hamlet.realm
|
|
|
|
|
2017-11-17 10:47:43 +01:00
|
|
|
def test_get_info(apply_markdown: bool, client_gravatar: bool) -> None:
|
2017-11-02 17:19:54 +01:00
|
|
|
clear_client_event_queues_for_testing()
|
|
|
|
|
|
|
|
queue_data = dict(
|
|
|
|
all_public_streams=False,
|
|
|
|
apply_markdown=apply_markdown,
|
2017-10-31 18:36:18 +01:00
|
|
|
client_gravatar=client_gravatar,
|
2017-11-02 17:19:54 +01:00
|
|
|
client_type_name='website',
|
|
|
|
event_types=['message'],
|
|
|
|
last_connection_time=time.time(),
|
|
|
|
queue_timeout=0,
|
|
|
|
realm_id=realm.id,
|
|
|
|
user_profile_id=hamlet.id,
|
|
|
|
)
|
2017-10-26 22:10:52 +02:00
|
|
|
|
2017-11-02 17:19:54 +01:00
|
|
|
client = allocate_client_descriptor(queue_data)
|
|
|
|
message_event = dict(
|
|
|
|
realm_id=realm.id,
|
|
|
|
stream_name='whatever',
|
|
|
|
)
|
2017-10-26 22:10:52 +02:00
|
|
|
|
2017-11-02 17:19:54 +01:00
|
|
|
client_info = get_client_info_for_message_event(
|
|
|
|
message_event,
|
|
|
|
users=[
|
|
|
|
dict(id=cordelia.id),
|
|
|
|
],
|
|
|
|
)
|
2017-10-26 22:10:52 +02:00
|
|
|
|
2017-11-02 17:19:54 +01:00
|
|
|
self.assertEqual(len(client_info), 0)
|
2017-10-26 22:10:52 +02:00
|
|
|
|
2017-11-02 17:19:54 +01:00
|
|
|
client_info = get_client_info_for_message_event(
|
|
|
|
message_event,
|
|
|
|
users=[
|
|
|
|
dict(id=cordelia.id),
|
|
|
|
dict(id=hamlet.id, flags=['mentioned']),
|
|
|
|
],
|
|
|
|
)
|
|
|
|
self.assertEqual(len(client_info), 1)
|
2017-10-26 22:10:52 +02:00
|
|
|
|
2017-11-02 17:19:54 +01:00
|
|
|
dct = client_info[client.event_queue.id]
|
|
|
|
self.assertEqual(dct['client'].apply_markdown, apply_markdown)
|
2017-10-31 18:36:18 +01:00
|
|
|
self.assertEqual(dct['client'].client_gravatar, client_gravatar)
|
2017-11-02 17:19:54 +01:00
|
|
|
self.assertEqual(dct['client'].user_profile_id, hamlet.id)
|
|
|
|
self.assertEqual(dct['flags'], ['mentioned'])
|
|
|
|
self.assertEqual(dct['is_sender'], False)
|
2017-10-26 22:10:52 +02:00
|
|
|
|
2017-10-31 18:36:18 +01:00
|
|
|
test_get_info(apply_markdown=False, client_gravatar=False)
|
|
|
|
test_get_info(apply_markdown=True, client_gravatar=False)
|
|
|
|
|
|
|
|
test_get_info(apply_markdown=False, client_gravatar=True)
|
|
|
|
test_get_info(apply_markdown=True, client_gravatar=True)
|
2017-10-26 22:10:52 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_process_message_event_with_mocked_client_info(self) -> None:
        """Verify that process_message_event delivers the right payload to
        each client: rendered vs. raw content depending on apply_markdown,
        avatar_url handling depending on client_gravatar, and per-client
        flags copied from client_info.
        """
        hamlet = self.example_user("hamlet")

        class MockClient:
            # Minimal stand-in for a client descriptor: accepts every
            # message event and records whatever it is handed in `events`.
            def __init__(self, user_profile_id: int,
                         apply_markdown: bool,
                         client_gravatar: bool) -> None:
                self.user_profile_id = user_profile_id
                self.apply_markdown = apply_markdown
                self.client_gravatar = client_gravatar
                self.client_type_name = 'whatever'
                self.events = [] # type: List[Dict[str, Any]]

            def accepts_messages(self) -> bool:
                return True

            def accepts_event(self, event: Dict[str, Any]) -> bool:
                # This test only ever routes 'message' events.
                assert(event['type'] == 'message')
                return True

            def add_event(self, event: Dict[str, Any]) -> None:
                self.events.append(event)

        # One mock client per (apply_markdown, client_gravatar) combination.
        client1 = MockClient(
            user_profile_id=hamlet.id,
            apply_markdown=True,
            client_gravatar=False,
        )

        client2 = MockClient(
            user_profile_id=hamlet.id,
            apply_markdown=False,
            client_gravatar=False,
        )

        client3 = MockClient(
            user_profile_id=hamlet.id,
            apply_markdown=True,
            client_gravatar=True,
        )

        client4 = MockClient(
            user_profile_id=hamlet.id,
            apply_markdown=False,
            client_gravatar=True,
        )

        client_info = {
            'client:1': dict(
                client=client1,
                flags=['starred'],
            ),
            'client:2': dict(
                client=client2,
                flags=['has_alert_word'],
            ),
            'client:3': dict(
                client=client3,
                flags=[],
            ),
            'client:4': dict(
                client=client4,
                flags=[],
            ),
        }

        sender = hamlet

        message_event = dict(
            message_dict=dict(
                id=999,
                content='**hello**',
                rendered_content='<b>hello</b>',
                sender_id=sender.id,
                type='stream',
                client='website',

                # NOTE: Some of these fields are clutter, but some
                # will be useful when we let clients specify
                # that they can compute their own gravatar URLs.
                sender_email=sender.email,
                sender_realm_id=sender.realm_id,
                sender_avatar_source=UserProfile.AVATAR_FROM_GRAVATAR,
                sender_avatar_version=1,
                sender_is_mirror_dummy=None,
                raw_display_recipient=None,
                recipient_type=None,
                recipient_type_id=None,
            ),
        )

        # Setting users to `[]` bypasses code we don't care about
        # for this test--we assume client_info is correct in our mocks,
        # and we are interested in how messages are put on event queue.
        users = [] # type: List[Any]

        with mock.patch('zerver.tornado.event_queue.get_client_info_for_message_event',
                        return_value=client_info):
            process_message_event(message_event, users)

        # We are not closely examining avatar_url at this point, so
        # just sanity check them and then delete the keys so that
        # upcoming comparisons work.
        for client in [client1, client2]:
            message = client.events[0]['message']
            self.assertIn('gravatar.com', message['avatar_url'])
            message.pop('avatar_url')

        # apply_markdown=True -> rendered HTML with text/html content_type.
        self.assertEqual(client1.events, [
            dict(
                type='message',
                message=dict(
                    type='stream',
                    sender_id=sender.id,
                    sender_email=sender.email,
                    id=999,
                    content='<b>hello</b>',
                    content_type='text/html',
                    client='website',
                ),
                flags=['starred'],
            ),
        ])

        # apply_markdown=False -> raw markdown with text/x-markdown.
        self.assertEqual(client2.events, [
            dict(
                type='message',
                message=dict(
                    type='stream',
                    sender_id=sender.id,
                    sender_email=sender.email,
                    id=999,
                    content='**hello**',
                    content_type='text/x-markdown',
                    client='website',
                ),
                flags=['has_alert_word'],
            ),
        ])

        # client_gravatar=True -> avatar_url is None in the payload
        # (clients 3 and 4), unlike clients 1/2 which got gravatar URLs.
        self.assertEqual(client3.events, [
            dict(
                type='message',
                message=dict(
                    type='stream',
                    sender_id=sender.id,
                    sender_email=sender.email,
                    avatar_url=None,
                    id=999,
                    content='<b>hello</b>',
                    content_type='text/html',
                    client='website',
                ),
                flags=[],
            ),
        ])

        self.assertEqual(client4.events, [
            dict(
                type='message',
                message=dict(
                    type='stream',
                    sender_id=sender.id,
                    sender_email=sender.email,
                    avatar_url=None,
                    id=999,
                    content='**hello**',
                    content_type='text/x-markdown',
                    client='website',
                ),
                flags=[],
            ),
        ])
2017-10-21 23:10:22 +02:00
|
|
|
class FetchQueriesTest(ZulipTestCase):
    def test_queries(self) -> None:
        """Pin down the number of database queries issued by
        fetch_initial_state_data, both for a full fetch and per event type."""
        user = self.example_user("hamlet")

        self.login(user.email)

        flush_per_request_caches()
        with queries_captured() as queries, \
                mock.patch('zerver.lib.events.always_want') as want_mock:
            fetch_initial_state_data(
                user_profile=user,
                event_types=None,
                queue_id='x',
                client_gravatar=False,
            )

        self.assert_length(queries, 30)

        expected_counts = {
            'alert_words': 0,
            'custom_profile_fields': 1,
            'default_streams': 1,
            'default_stream_groups': 1,
            'hotspots': 0,
            'message': 1,
            'muted_topics': 1,
            'pointer': 0,
            'presence': 3,
            'realm': 0,
            'realm_bot': 1,
            'realm_domains': 1,
            'realm_embedded_bots': 0,
            'realm_emoji': 1,
            'realm_filters': 1,
            'realm_user': 3,
            'realm_user_groups': 2,
            'starred_messages': 1,
            'stream': 2,
            'subscription': 6,
            'update_display_settings': 0,
            'update_global_notifications': 0,
            'update_message_flags': 5,
            'zulip_version': 0,
        }

        # always_want was called once per event type; its first positional
        # argument is the event type name.
        wanted_event_types = {call[0][0] for call in want_mock.call_args_list}

        self.assertEqual(wanted_event_types, set(expected_counts))

        for event_type in sorted(wanted_event_types):
            count = expected_counts[event_type]
            flush_per_request_caches()
            with queries_captured() as queries:
                # 'update_message_flags' is fetched together with 'message'.
                if event_type == 'update_message_flags':
                    event_types = ['update_message_flags', 'message']
                else:
                    event_types = [event_type]

                fetch_initial_state_data(
                    user_profile=user,
                    event_types=event_types,
                    queue_id='x',
                    client_gravatar=False,
                )
            self.assert_length(queries, count)
2017-05-07 21:25:59 +02:00
|
|
|
class TestEventsRegisterAllPublicStreamsDefaults(ZulipTestCase):
    """Exercise _default_all_public_streams: an explicitly passed True/False
    wins, and the user's stored default applies only when None is passed."""

    def setUp(self) -> None:
        self.user_profile = self.example_user('hamlet')
        self.email = self.user_profile.email

    def _set_default(self, default: bool) -> None:
        # Persist the user's stored default_all_public_streams flag.
        self.user_profile.default_all_public_streams = default
        self.user_profile.save()

    def test_use_passed_all_public_true_default_false(self) -> None:
        self._set_default(False)
        self.assertTrue(_default_all_public_streams(self.user_profile, True))

    def test_use_passed_all_public_true_default(self) -> None:
        self._set_default(True)
        self.assertTrue(_default_all_public_streams(self.user_profile, True))

    def test_use_passed_all_public_false_default_false(self) -> None:
        self._set_default(False)
        self.assertFalse(_default_all_public_streams(self.user_profile, False))

    def test_use_passed_all_public_false_default_true(self) -> None:
        self._set_default(True)
        self.assertFalse(_default_all_public_streams(self.user_profile, False))

    def test_use_true_default_for_none(self) -> None:
        self._set_default(True)
        self.assertTrue(_default_all_public_streams(self.user_profile, None))

    def test_use_false_default_for_none(self) -> None:
        self._set_default(False)
        self.assertFalse(_default_all_public_streams(self.user_profile, None))
2017-05-07 21:25:59 +02:00
|
|
|
class TestEventsRegisterNarrowDefaults(ZulipTestCase):
|
2017-11-05 10:51:25 +01:00
|
|
|
def setUp(self) -> None:
|
2017-05-07 21:25:59 +02:00
|
|
|
self.user_profile = self.example_user('hamlet')
|
|
|
|
self.email = self.user_profile.email
|
2014-02-07 01:22:19 +01:00
|
|
|
self.stream = get_stream('Verona', self.user_profile.realm)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_use_passed_narrow_no_default(self) -> None:
|
2014-02-07 01:22:19 +01:00
|
|
|
self.user_profile.default_events_register_stream_id = None
|
|
|
|
self.user_profile.save()
|
2016-06-05 04:58:33 +02:00
|
|
|
result = _default_narrow(self.user_profile, [[u'stream', u'my_stream']])
|
|
|
|
self.assertEqual(result, [[u'stream', u'my_stream']])
|
2014-02-07 01:22:19 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_use_passed_narrow_with_default(self) -> None:
|
2014-02-07 01:22:19 +01:00
|
|
|
self.user_profile.default_events_register_stream_id = self.stream.id
|
|
|
|
self.user_profile.save()
|
2016-06-05 04:58:33 +02:00
|
|
|
result = _default_narrow(self.user_profile, [[u'stream', u'my_stream']])
|
|
|
|
self.assertEqual(result, [[u'stream', u'my_stream']])
|
2014-02-07 01:22:19 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_use_default_if_narrow_is_empty(self) -> None:
|
2014-02-07 01:22:19 +01:00
|
|
|
self.user_profile.default_events_register_stream_id = self.stream.id
|
|
|
|
self.user_profile.save()
|
|
|
|
result = _default_narrow(self.user_profile, [])
|
2016-06-05 04:58:33 +02:00
|
|
|
self.assertEqual(result, [[u'stream', u'Verona']])
|
2014-02-07 01:22:19 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_use_narrow_if_default_is_none(self) -> None:
|
2014-02-07 01:22:19 +01:00
|
|
|
self.user_profile.default_events_register_stream_id = None
|
|
|
|
self.user_profile.save()
|
|
|
|
result = _default_narrow(self.user_profile, [])
|
|
|
|
self.assertEqual(result, [])
|