# -*- coding: utf-8 -*-
# See http://zulip.readthedocs.io/en/latest/events-system.html for
# high-level documentation on how this system works.
from __future__ import absolute_import
from __future__ import print_function
from typing import Any, Callable, Dict, List, Optional, Union, Text, Tuple

import os
import shutil

from django.conf import settings
from django.http import HttpRequest, HttpResponse
from django.test import TestCase
from django.utils.timezone import now as timezone_now

from zerver.models import (
    get_client, get_realm, get_recipient, get_stream, get_user,
    Message, RealmDomain, Recipient, UserMessage, UserPresence, UserProfile,
    Realm,
)

from zerver.lib.actions import (
    bulk_add_subscriptions,
    bulk_remove_subscriptions,
    do_add_alert_words,
    check_add_realm_emoji,
    check_send_typing_notification,
    notify_realm_custom_profile_fields,
    do_add_realm_filter,
    do_add_reaction,
    do_remove_reaction,
    do_change_avatar_fields,
    do_change_default_all_public_streams,
    do_change_default_events_register_stream,
    do_change_default_sending_stream,
    do_change_full_name,
    do_change_bot_owner,
    do_change_is_admin,
    do_change_stream_description,
    do_change_subscription_property,
    do_create_user,
    do_deactivate_stream,
    do_deactivate_user,
    do_mark_hotspot_as_read,
    do_reactivate_user,
    do_refer_friend,
    do_regenerate_api_key,
    do_remove_alert_words,
    do_remove_realm_emoji,
    do_remove_realm_filter,
    do_rename_stream,
    do_add_default_stream,
    do_remove_default_stream,
    do_set_muted_topics,
    do_set_realm_property,
    do_set_realm_authentication_methods,
    do_set_realm_message_editing,
    do_update_embedded_data,
    do_update_message,
    do_update_message_flags,
    do_update_muted_topic,
    do_update_pointer,
    do_update_user_presence,
    do_set_user_display_setting,
    do_change_notification_settings,
    do_add_realm_domain,
    do_change_realm_domain,
    do_remove_realm_domain,
    do_change_icon_source,
    log_event,
    do_delete_message,
)

from zerver.lib.events import (
    apply_events,
    fetch_initial_state_data,
)

from zerver.lib.message import render_markdown
from zerver.lib.test_helpers import POSTRequestMock, get_subscription
from zerver.lib.test_classes import (
    ZulipTestCase,
)
from zerver.lib.validator import (
    check_bool, check_dict, check_dict_only, check_float, check_int, check_list, check_string,
    equals, check_none_or, Validator
)

from zerver.views.events_register import _default_all_public_streams, _default_narrow

from zerver.tornado.event_queue import allocate_client_descriptor, EventQueue
from zerver.tornado.views import get_events_backend

from collections import OrderedDict
import mock
import time
import ujson
from six.moves import range

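# The tests in this module lean on one core consistency check: the state a
# client builds by fetching `/register` data once and then applying the
# events an action generates should match the state a fresh fetch would
# return afterwards.  A rough sketch of that invariant, using the helpers
# imported above (see EventsRegisterTest.do_test below for the real version):
#
#     state = fetch_initial_state_data(user_profile, event_types, "")
#     action()                       # e.g. do_update_pointer(...)
#     apply_events(state, events, user_profile)
#     assert state == fetch_initial_state_data(user_profile, event_types, "")
#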
class LogEventsTest(ZulipTestCase):
    def test_with_missing_event_log_dir_setting(self):
        # type: () -> None
        with self.settings(EVENT_LOG_DIR=None):
            log_event(None)

    def test_log_event_mkdir(self):
        # type: () -> None
        dir_name = 'var/test-log-dir'

        try:
            shutil.rmtree(dir_name)
        except OSError:  # nocoverage
            # assume it doesn't exist already
            pass

        self.assertFalse(os.path.exists(dir_name))
        with self.settings(EVENT_LOG_DIR=dir_name):
            event = {}  # type: Dict[str, int]
            log_event(event)
        self.assertTrue(os.path.exists(dir_name))

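# EventsEndpointTest covers the /json/register endpoint itself; the Tornado
# pieces (request_event_queue, get_user_events) are mocked out below, so no
# running event queue server is needed.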
class EventsEndpointTest(ZulipTestCase):
    def test_events_register_endpoint(self):
        # type: () -> None

        # This test is intended to get minimal coverage on the
        # events_register code paths
        email = self.example_email("hamlet")
        with mock.patch('zerver.views.events_register.do_events_register', return_value={}):
            result = self.client_post('/json/register', **self.api_auth(email))
        self.assert_json_success(result)

        with mock.patch('zerver.lib.events.request_event_queue', return_value=None):
            result = self.client_post('/json/register', **self.api_auth(email))
        self.assert_json_error(result, "Could not allocate event queue")

        with mock.patch('zerver.lib.events.request_event_queue', return_value='15:11'):
            with mock.patch('zerver.lib.events.get_user_events',
                            return_value=[]):
                result = self.client_post('/json/register', dict(event_types=ujson.dumps(['pointer'])),
                                          **self.api_auth(email))
        self.assert_json_success(result)
        result_dict = ujson.loads(result.content)
        self.assertEqual(result_dict['last_event_id'], -1)
        self.assertEqual(result_dict['queue_id'], '15:11')

        with mock.patch('zerver.lib.events.request_event_queue', return_value='15:12'):
            with mock.patch('zerver.lib.events.get_user_events',
                            return_value=[{
                                'id': 6,
                                'type': 'pointer',
                                'pointer': 15,
                            }]):
                result = self.client_post('/json/register', dict(event_types=ujson.dumps(['pointer'])),
                                          **self.api_auth(email))
        self.assert_json_success(result)
        result_dict = ujson.loads(result.content)
        self.assertEqual(result_dict['last_event_id'], 6)
        self.assertEqual(result_dict['pointer'], 15)
        self.assertEqual(result_dict['queue_id'], '15:12')

        # Now test with `fetch_event_types` not matching the event
        with mock.patch('zerver.lib.events.request_event_queue', return_value='15:13'):
            with mock.patch('zerver.lib.events.get_user_events',
                            return_value=[{
                                'id': 6,
                                'type': 'pointer',
                                'pointer': 15,
                            }]):
                result = self.client_post('/json/register',
                                          dict(event_types=ujson.dumps(['pointer']),
                                               fetch_event_types=ujson.dumps(['message'])),
                                          **self.api_auth(email))
        self.assert_json_success(result)
        result_dict = ujson.loads(result.content)
        self.assertEqual(result_dict['last_event_id'], 6)
        # Check that the message event types data is in there
        self.assertIn('max_message_id', result_dict)
        # Check that the pointer event types data is not in there
        self.assertNotIn('pointer', result_dict)
        self.assertEqual(result_dict['queue_id'], '15:13')

        # Now test with `fetch_event_types` matching the event
        with mock.patch('zerver.lib.events.request_event_queue', return_value='15:13'):
            with mock.patch('zerver.lib.events.get_user_events',
                            return_value=[{
                                'id': 6,
                                'type': 'pointer',
                                'pointer': 15,
                            }]):
                result = self.client_post('/json/register',
                                          dict(fetch_event_types=ujson.dumps(['pointer']),
                                               event_types=ujson.dumps(['message'])),
                                          **self.api_auth(email))
        self.assert_json_success(result)
        result_dict = ujson.loads(result.content)
        self.assertEqual(result_dict['last_event_id'], 6)
        # Check that we didn't fetch the messages data
        self.assertNotIn('max_message_id', result_dict)
        # Check that the pointer data is in there, and is correctly
        # updated (preserving our atomicity guarantee), though of
        # course any future pointer events won't be distributed
        self.assertIn('pointer', result_dict)
        self.assertEqual(result_dict['pointer'], 15)
        self.assertEqual(result_dict['queue_id'], '15:13')

    def test_tornado_endpoint(self):
        # type: () -> None

        # This test is mostly intended to get minimal coverage on
        # the /notify_tornado endpoint, so we can have 100% URL coverage,
        # but it does exercise a little bit of the codepath.
        post_data = dict(
            data=ujson.dumps(
                dict(
                    event=dict(
                        type='other'
                    ),
                    users=[self.example_user('hamlet').id],
                ),
            ),
        )
        req = POSTRequestMock(post_data, user_profile=None)
        req.META['REMOTE_ADDR'] = '127.0.0.1'
        result = self.client_post_request('/notify_tornado', req)
        self.assert_json_error(result, 'Access denied', status_code=403)

        post_data['secret'] = settings.SHARED_SECRET
        req = POSTRequestMock(post_data, user_profile=None)
        req.META['REMOTE_ADDR'] = '127.0.0.1'
        result = self.client_post_request('/notify_tornado', req)
        self.assert_json_success(result)

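# GetEventsTest calls the Tornado view get_events_backend directly (see
# tornado_call below) with dont_block=True, so each request returns
# immediately instead of long-polling.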
class GetEventsTest(ZulipTestCase):
    def tornado_call(self, view_func, user_profile, post_data):
        # type: (Callable[[HttpRequest, UserProfile], HttpResponse], UserProfile, Dict[str, Any]) -> HttpResponse
        request = POSTRequestMock(post_data, user_profile)
        return view_func(request, user_profile)

    def test_get_events(self):
        # type: () -> None
        user_profile = self.example_user('hamlet')
        email = user_profile.email
        recipient_user_profile = self.example_user('othello')
        recipient_email = recipient_user_profile.email
        self.login(email)

        result = self.tornado_call(get_events_backend, user_profile,
                                   {"apply_markdown": ujson.dumps(True),
                                    "event_types": ujson.dumps(["message"]),
                                    "user_client": "website",
                                    "dont_block": ujson.dumps(True),
                                    })
        self.assert_json_success(result)
        queue_id = ujson.loads(result.content)["queue_id"]

        recipient_result = self.tornado_call(get_events_backend, recipient_user_profile,
                                             {"apply_markdown": ujson.dumps(True),
                                              "event_types": ujson.dumps(["message"]),
                                              "user_client": "website",
                                              "dont_block": ujson.dumps(True),
                                              })
        self.assert_json_success(recipient_result)
        recipient_queue_id = ujson.loads(recipient_result.content)["queue_id"]

        result = self.tornado_call(get_events_backend, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": -1,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 0)

        local_id = 10.01
        self.send_message(email, recipient_email, Recipient.PERSONAL, "hello", local_id=local_id, sender_queue_id=queue_id)

        result = self.tornado_call(get_events_backend, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": -1,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 1)
        self.assertEqual(events[0]["type"], "message")
        self.assertEqual(events[0]["message"]["sender_email"], email)
        self.assertEqual(events[0]["local_message_id"], local_id)
        self.assertEqual(events[0]["message"]["display_recipient"][0]["is_mirror_dummy"], False)
        self.assertEqual(events[0]["message"]["display_recipient"][1]["is_mirror_dummy"], False)

        last_event_id = events[0]["id"]
        local_id += 0.01

        self.send_message(email, recipient_email, Recipient.PERSONAL, "hello", local_id=local_id, sender_queue_id=queue_id)

        result = self.tornado_call(get_events_backend, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": last_event_id,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 1)
        self.assertEqual(events[0]["type"], "message")
        self.assertEqual(events[0]["message"]["sender_email"], email)
        self.assertEqual(events[0]["local_message_id"], local_id)

        # Test that the received message in the receiver's event queue
        # exists and does not contain a local id
        recipient_result = self.tornado_call(get_events_backend, recipient_user_profile,
                                             {"queue_id": recipient_queue_id,
                                              "user_client": "website",
                                              "last_event_id": -1,
                                              "dont_block": ujson.dumps(True),
                                              })
        recipient_events = ujson.loads(recipient_result.content)["events"]
        self.assert_json_success(recipient_result)
        self.assertEqual(len(recipient_events), 2)
        self.assertEqual(recipient_events[0]["type"], "message")
        self.assertEqual(recipient_events[0]["message"]["sender_email"], email)
        self.assertTrue("local_message_id" not in recipient_events[0])
        self.assertEqual(recipient_events[1]["type"], "message")
        self.assertEqual(recipient_events[1]["message"]["sender_email"], email)
        self.assertTrue("local_message_id" not in recipient_events[1])

    def test_get_events_narrow(self):
        # type: () -> None
        user_profile = self.example_user('hamlet')
        email = user_profile.email
        self.login(email)

        result = self.tornado_call(get_events_backend, user_profile,
                                   {"apply_markdown": ujson.dumps(True),
                                    "event_types": ujson.dumps(["message"]),
                                    "narrow": ujson.dumps([["stream", "denmark"]]),
                                    "user_client": "website",
                                    "dont_block": ujson.dumps(True),
                                    })
        self.assert_json_success(result)
        queue_id = ujson.loads(result.content)["queue_id"]

        result = self.tornado_call(get_events_backend, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": -1,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 0)

        self.send_message(email, self.example_email("othello"), Recipient.PERSONAL, "hello")
        self.send_message(email, "Denmark", Recipient.STREAM, "hello")

        result = self.tornado_call(get_events_backend, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": -1,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 1)
        self.assertEqual(events[0]["type"], "message")
        self.assertEqual(events[0]["message"]["display_recipient"], "Denmark")

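# EventsRegisterTest is the heart of this module: each test triggers an
# action, captures the events it generates from a freshly allocated event
# queue, validates their schema, and checks that applying them to a
# previously fetched /register state reproduces a fresh fetch (see do_test).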
class EventsRegisterTest(ZulipTestCase):

    def setUp(self):
        # type: () -> None
        super(EventsRegisterTest, self).setUp()
        self.user_profile = self.example_user('hamlet')

    def create_bot(self, email):
        # type: (str) -> UserProfile
        return do_create_user(email, '123',
                              get_realm('zulip'), 'Test Bot', 'test',
                              bot_type=UserProfile.DEFAULT_BOT, bot_owner=self.user_profile)

    def realm_bot_schema(self, field_name, check):
        # type: (str, Validator) -> Validator
        return self.check_events_dict([
            ('type', equals('realm_bot')),
            ('op', equals('update')),
            ('bot', check_dict_only([
                ('email', check_string),
                ('user_id', check_int),
                (field_name, check),
            ])),
        ])

    def do_test(self, action, event_types=None, include_subscribers=True, state_change_expected=True,
                num_events=1):
        # type: (Callable[[], Any], Optional[List[str]], bool, bool, int) -> List[Dict[str, Any]]
        client = allocate_client_descriptor(
            dict(user_profile_id = self.user_profile.id,
                 user_profile_email = self.user_profile.email,
                 realm_id = self.user_profile.realm_id,
                 event_types = event_types,
                 client_type_name = "website",
                 apply_markdown = True,
                 all_public_streams = False,
                 queue_timeout = 600,
                 last_connection_time = time.time(),
                 narrow = [])
        )
        # hybrid_state = initial fetch state + re-applying events triggered by our action
        # normal_state = do action then fetch at the end (the "normal" code path)
        hybrid_state = fetch_initial_state_data(self.user_profile, event_types, "", include_subscribers=include_subscribers)
        action()
        events = client.event_queue.contents()
        self.assertTrue(len(events) == num_events)

        before = ujson.dumps(hybrid_state)
        apply_events(hybrid_state, events, self.user_profile, include_subscribers=include_subscribers)
        after = ujson.dumps(hybrid_state)

        if state_change_expected:
            if before == after:
                print(events)  # nocoverage
                raise AssertionError('Test does not exercise enough code -- events do not change state.')
        else:
            if before != after:
                raise AssertionError('Test is invalid--state actually does change here.')

        normal_state = fetch_initial_state_data(self.user_profile, event_types, "", include_subscribers=include_subscribers)
        self.match_states(hybrid_state, normal_state)
        return events

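    # Typical usage of do_test, taken from the tests below:
    #
    #     events = self.do_test(lambda: do_update_pointer(self.user_profile, 1500))
    #     error = schema_checker('events[0]', events[0])
    #     self.assert_on_error(error)
    #
    # Pass state_change_expected=False for actions (reactions, typing, etc.)
    # whose events do not change the /register state, and num_events when an
    # action is expected to produce more than one event.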
    def assert_on_error(self, error):
        # type: (Optional[str]) -> None
        if error:
            raise AssertionError(error)

    def match_states(self, state1, state2):
        # type: (Dict[str, Any], Dict[str, Any]) -> None
        def normalize(state):
            # type: (Dict[str, Any]) -> None
            state['realm_users'] = {u['email']: u for u in state['realm_users']}
            for u in state['subscriptions']:
                if 'subscribers' in u:
                    u['subscribers'].sort()
            state['subscriptions'] = {u['name']: u for u in state['subscriptions']}
            state['unsubscribed'] = {u['name']: u for u in state['unsubscribed']}
            if 'realm_bots' in state:
                state['realm_bots'] = {u['email']: u for u in state['realm_bots']}
        normalize(state1)
        normalize(state2)
        self.assertEqual(state1, state2)

    def check_events_dict(self, required_keys):
        # type: (List[Tuple[str, Validator]]) -> Validator
        required_keys.append(('id', check_int))
        return check_dict_only(required_keys)

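    # Note: check_events_dict (above) appends an ('id', check_int) entry to
    # every schema, since each event in the queue also carries its event id.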
    def test_send_message_events(self):
        # type: () -> None
        schema_checker = self.check_events_dict([
            ('type', equals('message')),
            ('flags', check_list(None)),
            ('message', self.check_events_dict([
                ('avatar_url', check_string),
                ('client', check_string),
                ('content', check_string),
                ('content_type', equals('text/html')),
                ('display_recipient', check_string),
                ('is_mentioned', check_bool),
                ('reactions', check_list(None)),
                ('recipient_id', check_int),
                ('sender_realm_str', check_string),
                ('sender_email', check_string),
                ('sender_full_name', check_string),
                ('sender_id', check_int),
                ('sender_short_name', check_string),
                ('stream_id', check_int),
                ('subject', check_string),
                ('subject_links', check_list(None)),
                ('timestamp', check_int),
                ('type', check_string),
            ])),
        ])

        events = self.do_test(
            lambda: self.send_message(self.example_email("hamlet"), "Verona", Recipient.STREAM, "hello"),
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

        # Verify message editing
        schema_checker = self.check_events_dict([
            ('type', equals('update_message')),
            ('flags', check_list(None)),
            ('content', check_string),
            ('edit_timestamp', check_int),
            ('flags', check_list(None)),
            ('message_id', check_int),
            ('message_ids', check_list(check_int)),
            ('orig_content', check_string),
            ('orig_rendered_content', check_string),
            ('orig_subject', check_string),
            ('prev_rendered_content_version', check_int),
            ('propagate_mode', check_string),
            ('rendered_content', check_string),
            ('sender', check_string),
            ('stream_id', check_int),
            ('subject', check_string),
            ('subject_links', check_list(None)),
            ('user_id', check_int),
        ])

        message = Message.objects.order_by('-id')[0]
        topic = 'new_topic'
        propagate_mode = 'change_all'
        content = 'new content'
        rendered_content = render_markdown(message, content)
        events = self.do_test(
            lambda: do_update_message(self.user_profile, message, topic,
                                      propagate_mode, content, rendered_content),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

        # Verify do_update_embedded_data
        schema_checker = self.check_events_dict([
            ('type', equals('update_message')),
            ('flags', check_list(None)),
            ('content', check_string),
            ('flags', check_list(None)),
            ('message_id', check_int),
            ('message_ids', check_list(check_int)),
            ('rendered_content', check_string),
            ('sender', check_string),
        ])

        events = self.do_test(
            lambda: do_update_embedded_data(self.user_profile, message,
                                            u"embed_content", "<p>embed_content</p>"),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_update_message_flags(self):
        # type: () -> None
        # Test message flag update events
        schema_checker = self.check_events_dict([
            ('all', check_bool),
            ('type', equals('update_message_flags')),
            ('flag', check_string),
            ('messages', check_list(check_int)),
            ('operation', equals("add")),
        ])

        message = self.send_message(self.example_email("cordelia"), self.example_email("hamlet"), Recipient.PERSONAL, "hello")
        user_profile = self.example_user('hamlet')
        events = self.do_test(
            lambda: do_update_message_flags(user_profile, 'add', 'starred',
                                            [message], False, None, None),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

        schema_checker = self.check_events_dict([
            ('all', check_bool),
            ('type', equals('update_message_flags')),
            ('flag', check_string),
            ('messages', check_list(check_int)),
            ('operation', equals("remove")),
        ])
        events = self.do_test(
            lambda: do_update_message_flags(user_profile, 'remove', 'starred',
                                            [message], False, None, None),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_send_reaction(self):
        # type: () -> None
        schema_checker = self.check_events_dict([
            ('type', equals('reaction')),
            ('op', equals('add')),
            ('message_id', check_int),
            ('emoji_name', check_string),
            ('user', check_dict_only([
                ('email', check_string),
                ('full_name', check_string),
                ('user_id', check_int)
            ])),
        ])

        message_id = self.send_message(self.example_email("hamlet"), "Verona", Recipient.STREAM, "hello")
        message = Message.objects.get(id=message_id)
        events = self.do_test(
            lambda: do_add_reaction(
                self.user_profile, message, "tada"),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_remove_reaction(self):
        # type: () -> None
        schema_checker = self.check_events_dict([
            ('type', equals('reaction')),
            ('op', equals('remove')),
            ('message_id', check_int),
            ('emoji_name', check_string),
            ('user', check_dict_only([
                ('email', check_string),
                ('full_name', check_string),
                ('user_id', check_int)
            ])),
        ])

        message_id = self.send_message(self.example_email("hamlet"), "Verona", Recipient.STREAM, "hello")
        message = Message.objects.get(id=message_id)
        events = self.do_test(
            lambda: do_remove_reaction(
                self.user_profile, message, "tada"),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_typing_events(self):
        # type: () -> None
        schema_checker = self.check_events_dict([
            ('type', equals('typing')),
            ('op', equals('start')),
            ('sender', check_dict_only([
                ('email', check_string),
                ('user_id', check_int)])),
            ('recipients', check_list(check_dict_only([
                ('email', check_string),
                ('user_id', check_int),
            ]))),
        ])

        events = self.do_test(
            lambda: check_send_typing_notification(
                self.user_profile, [self.example_email("cordelia")], "start"),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_custom_profile_fields_events(self):
        # type: () -> None
        schema_checker = self.check_events_dict([
            ('type', equals('custom_profile_fields')),
            ('fields', check_list(check_dict_only([
                ('type', check_int),
                ('name', check_string),
            ]))),
        ])

        events = self.do_test(
            lambda: notify_realm_custom_profile_fields(
                self.user_profile.realm),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_presence_events(self):
        # type: () -> None
        schema_checker = self.check_events_dict([
            ('type', equals('presence')),
            ('email', check_string),
            ('server_timestamp', check_float),
            ('presence', check_dict_only([
                ('website', check_dict_only([
                    ('status', equals('active')),
                    ('timestamp', check_int),
                    ('client', check_string),
                    ('pushable', check_bool),
                ])),
            ])),
        ])
        events = self.do_test(lambda: do_update_user_presence(
            self.user_profile, get_client("website"), timezone_now(), UserPresence.ACTIVE))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_presence_events_multiple_clients(self):
        # type: () -> None
        schema_checker_android = self.check_events_dict([
            ('type', equals('presence')),
            ('email', check_string),
            ('server_timestamp', check_float),
            ('presence', check_dict_only([
                ('ZulipAndroid/1.0', check_dict_only([
                    ('status', equals('idle')),
                    ('timestamp', check_int),
                    ('client', check_string),
                    ('pushable', check_bool),
                ])),
            ])),
        ])
        self.client_post("/api/v1/users/me/presence", {'status': 'idle'},
                         HTTP_USER_AGENT="ZulipAndroid/1.0",
                         **self.api_auth(self.user_profile.email))
        self.do_test(lambda: do_update_user_presence(
            self.user_profile, get_client("website"), timezone_now(), UserPresence.ACTIVE))
        events = self.do_test(lambda: do_update_user_presence(
            self.user_profile, get_client("ZulipAndroid/1.0"), timezone_now(), UserPresence.IDLE))
        error = schema_checker_android('events[0]', events[0])
        self.assert_on_error(error)

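    # Note: in the presence schemas above, the 'presence' payload is keyed by
    # client name ('website', 'ZulipAndroid/1.0'), each with its own status,
    # timestamp, client, and pushable fields.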
    def test_pointer_events(self):
        # type: () -> None
        schema_checker = self.check_events_dict([
            ('type', equals('pointer')),
            ('pointer', check_int)
        ])
        events = self.do_test(lambda: do_update_pointer(self.user_profile, 1500))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_referral_events(self):
        # type: () -> None
        schema_checker = self.check_events_dict([
            ('type', equals('referral')),
            ('referrals', check_dict_only([
                ('granted', check_int),
                ('used', check_int),
            ])),
        ])
        events = self.do_test(lambda: do_refer_friend(self.user_profile, "friend@example.com"))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_register_events(self):
        # type: () -> None
        realm_user_add_checker = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('add')),
            ('person', check_dict_only([
                ('user_id', check_int),
                ('email', check_string),
                ('avatar_url', check_string),
                ('full_name', check_string),
                ('is_admin', check_bool),
                ('is_bot', check_bool),
                ('timezone', check_string),
            ])),
        ])

        events = self.do_test(lambda: self.register("test1@zulip.com", "test1"))
        self.assert_length(events, 1)
        error = realm_user_add_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_alert_words_events(self):
        # type: () -> None
        alert_words_checker = self.check_events_dict([
            ('type', equals('alert_words')),
            ('alert_words', check_list(check_string)),
        ])

        events = self.do_test(lambda: do_add_alert_words(self.user_profile, ["alert_word"]))
        error = alert_words_checker('events[0]', events[0])
        self.assert_on_error(error)

        events = self.do_test(lambda: do_remove_alert_words(self.user_profile, ["alert_word"]))
        error = alert_words_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_default_streams_events(self):
        # type: () -> None
        default_streams_checker = self.check_events_dict([
            ('type', equals('default_streams')),
            ('default_streams', check_list(check_dict_only([
                ('description', check_string),
                ('invite_only', check_bool),
                ('name', check_string),
                ('stream_id', check_int),
            ]))),
        ])

        stream = get_stream("Scotland", self.user_profile.realm)
        events = self.do_test(lambda: do_add_default_stream(stream))
        error = default_streams_checker('events[0]', events[0])
        self.assert_on_error(error)
        events = self.do_test(lambda: do_remove_default_stream(stream))
        error = default_streams_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_muted_topics_events(self):
        # type: () -> None
        muted_topics_checker = self.check_events_dict([
            ('type', equals('muted_topics')),
            ('muted_topics', check_list(check_list(check_string, 2))),
        ])
        events = self.do_test(lambda: do_set_muted_topics(self.user_profile, [[u"Denmark", u"topic"]]))
        error = muted_topics_checker('events[0]', events[0])
        self.assert_on_error(error)

        events = self.do_test(lambda: do_update_muted_topic(
            self.user_profile, "Denmark", "topic", "add"))
        error = muted_topics_checker('events[0]', events[0])
        self.assert_on_error(error)

        events = self.do_test(lambda: do_update_muted_topic(
            self.user_profile, "Denmark", "topic", "remove"))
        error = muted_topics_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_change_avatar_fields(self):
        # type: () -> None
        schema_checker = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('update')),
            ('person', check_dict_only([
                ('email', check_string),
                ('user_id', check_int),
                ('avatar_url', check_string),
            ])),
        ])
        events = self.do_test(
            lambda: do_change_avatar_fields(self.user_profile, UserProfile.AVATAR_FROM_USER),
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_change_full_name(self):
        # type: () -> None
        schema_checker = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('update')),
            ('person', check_dict_only([
                ('email', check_string),
                ('full_name', check_string),
                ('user_id', check_int),
            ])),
        ])
        events = self.do_test(lambda: do_change_full_name(self.user_profile, 'Sir Hamlet', self.user_profile))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

    def do_set_realm_property_test(self, name):
        # type: (str) -> None
        bool_tests = [True, False, True]  # type: List[bool]
        test_values = dict(
            add_emoji_by_admins_only=bool_tests,
            create_stream_by_admins_only=bool_tests,
            default_language=[u'es', u'de', u'en'],
            description=[u'Realm description', u'New description'],
            email_changes_disabled=bool_tests,
            invite_required=bool_tests,
            invite_by_admins_only=bool_tests,
            inline_image_preview=bool_tests,
            inline_url_embed_preview=bool_tests,
            message_retention_days=[10, 20],
            name=[u'Zulip', u'New Name'],
            name_changes_disabled=bool_tests,
            restricted_to_domain=bool_tests,
            waiting_period_threshold=[10, 20],
        )  # type: Dict[str, Any]

        property_type = Realm.property_types[name]
        if property_type is bool:
            validator = check_bool
        elif property_type is Text:
            validator = check_string
        elif property_type is int:
            validator = check_int
        elif property_type == (int, type(None)):
            validator = check_int
        else:
            raise AssertionError("Unexpected property type %s" % (property_type,))
        schema_checker = self.check_events_dict([
            ('type', equals('realm')),
            ('op', equals('update')),
            ('property', equals(name)),
            ('value', validator),
        ])

        vals = test_values.get(name)
        if vals is None:
            raise AssertionError('No test created for %s' % (name))
        do_set_realm_property(self.user_profile.realm, name, vals[0])
        for val in vals[1:]:
            events = self.do_test(
                lambda: do_set_realm_property(self.user_profile.realm, name, val))
            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)

    def test_change_realm_property(self):
        # type: () -> None

        for prop in Realm.property_types:
            self.do_set_realm_property_test(prop)

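    # Note: test_change_realm_property iterates over every key in
    # Realm.property_types, so a newly added realm property needs a matching
    # entry in test_values inside do_set_realm_property_test; otherwise that
    # helper fails with "No test created for ...".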
    def test_change_realm_authentication_methods(self):
        # type: () -> None
        schema_checker = self.check_events_dict([
            ('type', equals('realm')),
            ('op', equals('update_dict')),
            ('property', equals('default')),
            ('data', check_dict_only([
                ('authentication_methods', check_dict([]))
            ])),
        ])

        def fake_backends():
            # type: () -> Any
            backends = (
                'zproject.backends.DevAuthBackend',
                'zproject.backends.EmailAuthBackend',
                'zproject.backends.GitHubAuthBackend',
                'zproject.backends.GoogleMobileOauth2Backend',
                'zproject.backends.ZulipLDAPAuthBackend',
            )
            return self.settings(AUTHENTICATION_BACKENDS=backends)

        # Test transitions; any new backends should be tested with T/T/T/F/T
        for (auth_method_dict) in \
                ({'Google': True, 'Email': True, 'GitHub': True, 'LDAP': False, 'Dev': False},
                 {'Google': True, 'Email': True, 'GitHub': False, 'LDAP': False, 'Dev': False},
                 {'Google': True, 'Email': False, 'GitHub': False, 'LDAP': False, 'Dev': False},
                 {'Google': True, 'Email': False, 'GitHub': True, 'LDAP': False, 'Dev': False},
                 {'Google': False, 'Email': False, 'GitHub': False, 'LDAP': False, 'Dev': True},
                 {'Google': False, 'Email': False, 'GitHub': True, 'LDAP': False, 'Dev': True},
                 {'Google': False, 'Email': True, 'GitHub': True, 'LDAP': True, 'Dev': False}):
            with fake_backends():
                events = self.do_test(
                    lambda: do_set_realm_authentication_methods(
                        self.user_profile.realm,
                        auth_method_dict))

            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)

    def test_change_pin_stream(self):
        # type: () -> None
        schema_checker = self.check_events_dict([
            ('type', equals('subscription')),
            ('op', equals('update')),
            ('property', equals('pin_to_top')),
            ('stream_id', check_int),
            ('value', check_bool),
            ('name', check_string),
            ('email', check_string),
        ])
        stream = get_stream("Denmark", self.user_profile.realm)
        sub = get_subscription(stream.name, self.user_profile)
        do_change_subscription_property(self.user_profile, sub, stream, "pin_to_top", False)
        for pinned in (True, False):
            events = self.do_test(lambda: do_change_subscription_property(self.user_profile, sub, stream, "pin_to_top", pinned))
            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)

    def test_change_realm_message_edit_settings(self):
        # type: () -> None
        schema_checker = self.check_events_dict([
            ('type', equals('realm')),
            ('op', equals('update_dict')),
            ('property', equals('default')),
            ('data', check_dict_only([
                ('allow_message_editing', check_bool),
                ('message_content_edit_limit_seconds', check_int),
            ])),
        ])
        # Test every transition among the four possibilities {T,F} x {0, non-0}
        for (allow_message_editing, message_content_edit_limit_seconds) in \
                ((True, 0), (False, 0), (True, 0), (False, 1234), (True, 0), (True, 1234), (True, 0),
                 (False, 0), (False, 1234), (False, 0), (True, 1234), (False, 0),
                 (True, 1234), (True, 600), (False, 600), (False, 1234), (True, 600)):
            events = self.do_test(
                lambda: do_set_realm_message_editing(self.user_profile.realm,
                                                     allow_message_editing,
                                                     message_content_edit_limit_seconds))
            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)

    def test_change_is_admin(self):
        # type: () -> None
        schema_checker = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('update')),
            ('person', check_dict_only([
                ('email', check_string),
                ('is_admin', check_bool),
                ('user_id', check_int),
            ])),
        ])
        do_change_is_admin(self.user_profile, False)
        for is_admin in [True, False]:
            events = self.do_test(lambda: do_change_is_admin(self.user_profile, is_admin))
            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)

    def do_set_user_display_settings_test(self, setting_name, values_list):
        # type: (str, List[Union[bool, Text]]) -> None

        property_type = UserProfile.property_types[setting_name]
        if property_type is bool:
            validator = check_bool
        elif property_type is Text:
            validator = check_string
        else:
            raise AssertionError("Unexpected property type %s" % (property_type,))

        num_events = 1
        if setting_name == "timezone":
            num_events = 2
        if property_type == bool:
            do_set_user_display_setting(self.user_profile, setting_name, False)
        for value in values_list:
            events = self.do_test(lambda: do_set_user_display_setting(
                self.user_profile, setting_name, value), num_events=num_events)

            schema_checker = self.check_events_dict([
                ('type', equals('update_display_settings')),
                ('setting_name', equals(setting_name)),
                ('user', check_string),
                ('setting', validator),
            ])
            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)

            timezone_schema_checker = self.check_events_dict([
                ('type', equals('realm_user')),
                ('op', equals('update')),
                ('person', check_dict_only([
                    ('email', check_string),
                    ('user_id', check_int),
                    ('timezone', check_string),
                ])),
            ])
            if setting_name == "timezone":
                error = timezone_schema_checker('events[1]', events[1])
                self.assert_on_error(error)

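    # Note: changing the "timezone" display setting also notifies other users
    # via a realm_user/update event (validated by timezone_schema_checker
    # above), which is why that setting uses num_events=2.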
    def test_change_twenty_four_hour_time(self):
        # type: () -> None
        self.do_set_user_display_settings_test("twenty_four_hour_time", [True, False])

    def test_change_left_side_userlist(self):
        # type: () -> None
        self.do_set_user_display_settings_test("left_side_userlist", [True, False])

    def test_change_emoji_alt_code(self):
        # type: () -> None
        self.do_set_user_display_settings_test("emoji_alt_code", [True, False])

    def test_change_emojiset(self):
        # type: () -> None
        self.do_set_user_display_settings_test("emojiset", [u'apple', u'twitter'])

    def test_change_default_language(self):
        # type: () -> None
        self.do_set_user_display_settings_test("default_language", [u'de', u'es', u'en'])

    def test_change_timezone(self):
        # type: () -> None
        self.do_set_user_display_settings_test("timezone", [u'US/Mountain', u'US/Samoa', u'Pacific/Galapagos', u''])

    def test_change_notification_settings(self):
        # type: () -> None
        for notification_setting, v in self.user_profile.notification_setting_types.items():
            schema_checker = self.check_events_dict([
                ('type', equals('update_global_notifications')),
                ('notification_name', equals(notification_setting)),
                ('user', check_string),
                ('setting', check_bool),
            ])
            do_change_notification_settings(self.user_profile, notification_setting, False)
            for setting_value in [True, False]:
                events = self.do_test(lambda: do_change_notification_settings(
                    self.user_profile, notification_setting, setting_value, log=False))
                error = schema_checker('events[0]', events[0])
                self.assert_on_error(error)

    def test_realm_emoji_events(self):
        # type: () -> None
        schema_checker = self.check_events_dict([
            ('type', equals('realm_emoji')),
            ('op', equals('update')),
            ('realm_emoji', check_dict([])),
        ])
        events = self.do_test(lambda: check_add_realm_emoji(get_realm("zulip"), "my_emoji",
                                                            "https://realm.com/my_emoji"))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

        events = self.do_test(lambda: do_remove_realm_emoji(get_realm("zulip"), "my_emoji"))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_realm_filter_events(self):
        # type: () -> None
        schema_checker = self.check_events_dict([
            ('type', equals('realm_filters')),
            ('realm_filters', check_list(None)),  # TODO: validate tuples in the list
        ])
        events = self.do_test(lambda: do_add_realm_filter(get_realm("zulip"), "#(?P<id>[123])",
                                                          "https://realm.com/my_realm_filter/%(id)s"))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

        events = self.do_test(lambda: do_remove_realm_filter(get_realm("zulip"), "#(?P<id>[123])"))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_realm_domain_events(self):
        # type: () -> None
        schema_checker = self.check_events_dict([
            ('type', equals('realm_domains')),
            ('op', equals('add')),
            ('realm_domain', check_dict_only([
                ('domain', check_string),
                ('allow_subdomains', check_bool),
            ])),
        ])
        realm = get_realm('zulip')
        events = self.do_test(lambda: do_add_realm_domain(realm, 'zulip.org', False))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

        schema_checker = self.check_events_dict([
            ('type', equals('realm_domains')),
            ('op', equals('change')),
            ('realm_domain', check_dict_only([
                ('domain', equals('zulip.org')),
                ('allow_subdomains', equals(True)),
            ])),
        ])
        test_domain = RealmDomain.objects.get(realm=realm, domain='zulip.org')
        events = self.do_test(lambda: do_change_realm_domain(test_domain, True))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

        schema_checker = self.check_events_dict([
            ('type', equals('realm_domains')),
            ('op', equals('remove')),
            ('domain', equals('zulip.org')),
        ])
        events = self.do_test(lambda: do_remove_realm_domain(test_domain))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

2014-02-26 00:12:14 +01:00
|
|
|
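    # Creating a bot produces two events; the 'realm_bot' add event carrying
    # the full bot payload is the second one.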
    def test_create_bot(self):
        # type: () -> None
        bot_created_checker = self.check_events_dict([
            ('type', equals('realm_bot')),
            ('op', equals('add')),
            ('bot', check_dict_only([
                ('email', check_string),
                ('user_id', check_int),
                ('full_name', check_string),
                ('is_active', check_bool),
                ('api_key', check_string),
                ('default_sending_stream', check_none_or(check_string)),
                ('default_events_register_stream', check_none_or(check_string)),
                ('default_all_public_streams', check_bool),
                ('avatar_url', check_string),
                ('owner', check_string),
            ])),
        ])
        action = lambda: self.create_bot('test-bot@zulip.com')
        events = self.do_test(action, num_events=2)
        error = bot_created_checker('events[1]', events[1])
        self.assert_on_error(error)

    def test_change_bot_full_name(self):
        # type: () -> None
        bot = self.create_bot('test-bot@zulip.com')
        action = lambda: do_change_full_name(bot, 'New Bot Name', self.user_profile)
        events = self.do_test(action, num_events=2)
        error = self.realm_bot_schema('full_name', check_string)('events[1]', events[1])
        self.assert_on_error(error)

    def test_regenerate_bot_api_key(self):
        # type: () -> None
        bot = self.create_bot('test-bot@zulip.com')
        action = lambda: do_regenerate_api_key(bot, self.user_profile)
        events = self.do_test(action)
        error = self.realm_bot_schema('api_key', check_string)('events[0]', events[0])
        self.assert_on_error(error)

    def test_change_bot_avatar_source(self):
        # type: () -> None
        bot = self.create_bot('test-bot@zulip.com')
        action = lambda: do_change_avatar_fields(bot, bot.AVATAR_FROM_USER)
        events = self.do_test(action, num_events=2)
        error = self.realm_bot_schema('avatar_url', check_string)('events[0]', events[0])
        self.assertEqual(events[1]['type'], 'realm_user')
        self.assert_on_error(error)

    def test_change_realm_icon_source(self):
        # type: () -> None
        realm = get_realm('zulip')
        action = lambda: do_change_icon_source(realm, realm.ICON_FROM_GRAVATAR)
        events = self.do_test(action, state_change_expected=False)
        schema_checker = self.check_events_dict([
            ('type', equals('realm')),
            ('op', equals('update_dict')),
            ('property', equals('icon')),
            ('data', check_dict_only([
                ('icon_url', check_string),
                ('icon_source', check_string),
            ])),
        ])
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_change_bot_default_all_public_streams(self):
        # type: () -> None
        bot = self.create_bot('test-bot@zulip.com')
        action = lambda: do_change_default_all_public_streams(bot, True)
        events = self.do_test(action)
        error = self.realm_bot_schema('default_all_public_streams', check_bool)('events[0]', events[0])
        self.assert_on_error(error)

    def test_change_bot_default_sending_stream(self):
        # type: () -> None
        bot = self.create_bot('test-bot@zulip.com')
        stream = get_stream("Rome", bot.realm)

        action = lambda: do_change_default_sending_stream(bot, stream)
        events = self.do_test(action)
        error = self.realm_bot_schema('default_sending_stream', check_string)('events[0]', events[0])
        self.assert_on_error(error)

        action = lambda: do_change_default_sending_stream(bot, None)
        events = self.do_test(action)
        error = self.realm_bot_schema('default_sending_stream', equals(None))('events[0]', events[0])
        self.assert_on_error(error)

    def test_change_bot_default_events_register_stream(self):
        # type: () -> None
        bot = self.create_bot('test-bot@zulip.com')
        stream = get_stream("Rome", bot.realm)

        action = lambda: do_change_default_events_register_stream(bot, stream)
        events = self.do_test(action)
        error = self.realm_bot_schema('default_events_register_stream', check_string)('events[0]', events[0])
        self.assert_on_error(error)

        action = lambda: do_change_default_events_register_stream(bot, None)
        events = self.do_test(action)
        error = self.realm_bot_schema('default_events_register_stream', equals(None))('events[0]', events[0])
        self.assert_on_error(error)

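    # Changing a bot's owner sends a 'realm_bot' update event carrying the new owner_id.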
    def test_change_bot_owner(self):
        # type: () -> None
        change_bot_owner_checker = self.check_events_dict([
            ('type', equals('realm_bot')),
            ('op', equals('update')),
            ('bot', check_dict_only([
                ('email', check_string),
                ('user_id', check_int),
                ('owner_id', check_int),
            ])),
        ])
        self.user_profile = self.example_user('iago')
        owner = self.example_user('hamlet')
        bot = self.create_bot('test-bot@zulip.com')
        action = lambda: do_change_bot_owner(bot, owner, self.user_profile)
        events = self.do_test(action)
        error = change_bot_owner_checker('events[0]', events[0])
        self.assert_on_error(error)

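    # Deactivating a bot produces two events; the 'realm_bot' remove event is the second.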
    def test_do_deactivate_user(self):
        # type: () -> None
        bot_deactivate_checker = self.check_events_dict([
            ('type', equals('realm_bot')),
            ('op', equals('remove')),
            ('bot', check_dict_only([
                ('email', check_string),
                ('full_name', check_string),
                ('user_id', check_int),
            ])),
        ])
        bot = self.create_bot('foo-bot@zulip.com')
        action = lambda: do_deactivate_user(bot)
        events = self.do_test(action, num_events=2)
        error = bot_deactivate_checker('events[1]', events[1])
        self.assert_on_error(error)

    def test_do_reactivate_user(self):
        # type: () -> None
        bot_reactivate_checker = self.check_events_dict([
            ('type', equals('realm_bot')),
            ('op', equals('add')),
            ('bot', check_dict_only([
                ('email', check_string),
                ('user_id', check_int),
                ('full_name', check_string),
                ('is_active', check_bool),
                ('api_key', check_string),
                ('default_sending_stream', check_none_or(check_string)),
                ('default_events_register_stream', check_none_or(check_string)),
                ('default_all_public_streams', check_bool),
                ('avatar_url', check_string),
                ('owner', check_none_or(check_string)),
            ])),
        ])
        bot = self.create_bot('foo-bot@zulip.com')
        do_deactivate_user(bot)
        action = lambda: do_reactivate_user(bot)
        events = self.do_test(action, num_events=2)
        error = bot_reactivate_checker('events[1]', events[1])
        self.assert_on_error(error)

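    # Marking a hotspot as read sends a 'hotspots' event whose payload is a list of strings.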
    def test_do_mark_hotspot_as_read(self):
        # type: () -> None
        schema_checker = self.check_events_dict([
            ('type', equals('hotspots')),
            ('hotspots', check_list(check_string)),
        ])
        events = self.do_test(lambda: do_mark_hotspot_as_read(self.user_profile, 'welcome'))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

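    # Renaming a stream sends two stream/update events: one for the new
    # email_address and one for the new name.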
    def test_rename_stream(self):
        # type: () -> None
        stream = self.make_stream('old_name')
        new_name = u'stream with a brand new name'
        self.subscribe_to_stream(self.user_profile.email, stream.name)

        action = lambda: do_rename_stream(stream, new_name)
        events = self.do_test(action, num_events=2)

        schema_checker = self.check_events_dict([
            ('type', equals('stream')),
            ('op', equals('update')),
            ('property', equals('email_address')),
            ('value', check_string),
            ('stream_id', check_int),
            ('name', equals('old_name')),
        ])
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

        schema_checker = self.check_events_dict([
            ('type', equals('stream')),
            ('op', equals('update')),
            ('property', equals('name')),
            ('value', equals(new_name)),
            ('name', equals('old_name')),
            ('stream_id', check_int),
        ])
        error = schema_checker('events[1]', events[1])
        self.assert_on_error(error)

    def test_deactivate_stream_neversubscribed(self):
        # type: () -> None
        stream = self.make_stream('old_name')

        action = lambda: do_deactivate_stream(stream)
        events = self.do_test(action)

        schema_checker = self.check_events_dict([
            ('type', equals('stream')),
            ('op', equals('delete')),
            ('streams', check_list(check_dict([]))),
        ])
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_subscribe_other_user_never_subscribed(self):
        # type: () -> None
        action = lambda: self.subscribe_to_stream(self.example_email("othello"), u"test_stream")
        events = self.do_test(action, num_events=2)
        peer_add_schema_checker = self.check_events_dict([
            ('type', equals('subscription')),
            ('op', equals('peer_add')),
            ('user_id', check_int),
            ('subscriptions', check_list(check_string)),
        ])
        error = peer_add_schema_checker('events[1]', events[1])
        self.assert_on_error(error)

    def test_subscribe_events(self):
        # type: () -> None
        self.do_test_subscribe_events(include_subscribers=True)

    def test_subscribe_events_no_include_subscribers(self):
        # type: () -> None
        self.do_test_subscribe_events(include_subscribers=False)

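    # Shared body for the two subscription tests above; exercises the add,
    # peer_add, peer_remove, remove ('vacate'), resubscribe, description-change,
    # and invite-only stream creation flows.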
    def do_test_subscribe_events(self, include_subscribers):
        # type: (bool) -> None
        subscription_fields = [
            ('color', check_string),
            ('description', check_string),
            ('email_address', check_string),
            ('invite_only', check_bool),
            ('in_home_view', check_bool),
            ('name', check_string),
            ('desktop_notifications', check_bool),
            ('audible_notifications', check_bool),
            ('stream_id', check_int),
        ]
        if include_subscribers:
            subscription_fields.append(('subscribers', check_list(check_int)))  # type: ignore
        subscription_schema_checker = check_list(
            check_dict(subscription_fields),  # TODO: Can this be converted to check_dict_only?
        )
        stream_create_schema_checker = self.check_events_dict([
            ('type', equals('stream')),
            ('op', equals('create')),
            ('streams', check_list(check_dict_only([
                ('name', check_string),
                ('stream_id', check_int),
                ('invite_only', check_bool),
                ('description', check_string),
            ]))),
        ])
        add_schema_checker = self.check_events_dict([
            ('type', equals('subscription')),
            ('op', equals('add')),
            ('subscriptions', subscription_schema_checker),
        ])
        remove_schema_checker = self.check_events_dict([
            ('type', equals('subscription')),
            ('op', equals('remove')),
            ('subscriptions', check_list(
                check_dict_only([
                    ('name', equals('test_stream')),
                    ('stream_id', check_int),
                ]),
            )),
        ])
        peer_add_schema_checker = self.check_events_dict([
            ('type', equals('subscription')),
            ('op', equals('peer_add')),
            ('user_id', check_int),
            ('subscriptions', check_list(check_string)),
        ])
        peer_remove_schema_checker = self.check_events_dict([
            ('type', equals('subscription')),
            ('op', equals('peer_remove')),
            ('user_id', check_int),
            ('subscriptions', check_list(check_string)),
        ])
        stream_update_schema_checker = self.check_events_dict([
            ('type', equals('stream')),
            ('op', equals('update')),
            ('property', equals('description')),
            ('value', check_string),
            ('stream_id', check_int),
            ('name', check_string),
        ])

        # Subscribe to a totally new stream, so it's just Hamlet on it
        action = lambda: self.subscribe_to_stream(self.example_email("hamlet"), "test_stream")  # type: Callable
        events = self.do_test(action, event_types=["subscription", "realm_user"],
                              include_subscribers=include_subscribers)
        error = add_schema_checker('events[0]', events[0])
        self.assert_on_error(error)

        # Add another user to that totally new stream
        action = lambda: self.subscribe_to_stream(self.example_email("othello"), "test_stream")
        events = self.do_test(action,
                              include_subscribers=include_subscribers,
                              state_change_expected=include_subscribers,
                              )
        error = peer_add_schema_checker('events[0]', events[0])
        self.assert_on_error(error)

stream = get_stream("test_stream", self.user_profile.realm)
|
2014-02-04 20:52:02 +01:00
|
|
|
|
2017-02-22 09:34:12 +01:00
|
|
|
# Now remove the first user, to test the normal unsubscribe flow
|
2016-10-20 16:53:22 +02:00
|
|
|
action = lambda: bulk_remove_subscriptions(
|
2017-05-07 17:21:26 +02:00
|
|
|
[self.example_user('othello')],
|
2016-10-20 16:53:22 +02:00
|
|
|
[stream])
|
2017-02-20 08:30:09 +01:00
|
|
|
events = self.do_test(action,
|
2017-02-21 19:35:17 +01:00
|
|
|
include_subscribers=include_subscribers,
|
|
|
|
state_change_expected=include_subscribers,
|
|
|
|
)
|
2014-02-04 20:52:02 +01:00
|
|
|
error = peer_remove_schema_checker('events[0]', events[0])
|
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2017-02-22 09:34:12 +01:00
|
|
|
# Now remove the second user, to test the 'vacate' event flow
|
2016-10-20 16:53:22 +02:00
|
|
|
action = lambda: bulk_remove_subscriptions(
|
2017-05-07 17:21:26 +02:00
|
|
|
[self.example_user('hamlet')],
|
2016-10-20 16:53:22 +02:00
|
|
|
[stream])
|
2017-02-20 08:30:09 +01:00
|
|
|
events = self.do_test(action,
|
2017-03-26 08:17:48 +02:00
|
|
|
include_subscribers=include_subscribers,
|
|
|
|
num_events=2)
|
2014-03-02 06:46:54 +01:00
|
|
|
error = remove_schema_checker('events[1]', events[1])
|
2014-02-04 20:52:02 +01:00
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2017-02-22 09:34:12 +01:00
|
|
|
# Now resubscribe a user, to make sure that works on a vacated stream
|
2017-05-25 01:40:26 +02:00
|
|
|
action = lambda: self.subscribe_to_stream(self.example_email("hamlet"), "test_stream")
|
2017-02-20 08:30:09 +01:00
|
|
|
events = self.do_test(action,
|
2017-03-26 08:17:48 +02:00
|
|
|
include_subscribers=include_subscribers,
|
|
|
|
num_events=2)
|
2014-03-02 06:46:54 +01:00
|
|
|
error = add_schema_checker('events[1]', events[1])
|
2014-02-04 20:52:02 +01:00
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2017-01-30 04:14:12 +01:00
|
|
|
action = lambda: do_change_stream_description(stream, u'new description')
|
2017-02-20 08:30:09 +01:00
|
|
|
events = self.do_test(action,
|
|
|
|
include_subscribers=include_subscribers)
|
2014-02-04 20:52:02 +01:00
|
|
|
error = stream_update_schema_checker('events[0]', events[0])
|
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2017-03-24 05:49:23 +01:00
|
|
|
# Subscribe to a totally new invite-only stream, so it's just Hamlet on it
|
|
|
|
stream = self.make_stream("private", get_realm("zulip"), invite_only=True)
|
2017-05-07 17:21:26 +02:00
|
|
|
user_profile = self.example_user('hamlet')
|
2017-03-24 05:49:23 +01:00
|
|
|
action = lambda: bulk_add_subscriptions([stream], [user_profile])
|
2017-03-26 08:17:48 +02:00
|
|
|
events = self.do_test(action, include_subscribers=include_subscribers,
|
|
|
|
num_events=2)
|
2017-03-24 05:49:23 +01:00
|
|
|
error = stream_create_schema_checker('events[0]', events[0])
|
|
|
|
error = add_schema_checker('events[1]', events[1])
|
|
|
|
self.assert_on_error(error)
|
|
|
|
|
2017-05-14 21:14:26 +02:00
|
|
|
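    # Deleting a message sends a 'delete_message' event with the message id and sender.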
    def test_do_delete_message(self):
        # type: () -> None
        schema_checker = self.check_events_dict([
            ('type', equals('delete_message')),
            ('message_id', check_int),
            ('sender', check_string),
        ])
        msg_id = self.send_message("hamlet@zulip.com", "Verona", Recipient.STREAM)
        message = Message.objects.get(id=msg_id)
        events = self.do_test(
            lambda: do_delete_message(self.user_profile, message),
            state_change_expected=True,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

    def test_do_delete_message_no_max_id(self):
        # type: () -> None
        # Delete all historical messages for this user
        user_profile = self.example_user('hamlet')
        UserMessage.objects.filter(user_profile=user_profile).delete()
        msg_id = self.send_message("hamlet@zulip.com", "Verona", Recipient.STREAM)
        message = Message.objects.get(id=msg_id)
        self.do_test(
            lambda: do_delete_message(self.user_profile, message),
            state_change_expected=True,
        )
        result = fetch_initial_state_data(user_profile, None, "")
        self.assertEqual(result['max_message_id'], -1)


class FetchInitialStateDataTest(ZulipTestCase):
    # Non-admin users don't have access to all bots
    def test_realm_bots_non_admin(self):
        # type: () -> None
        user_profile = self.example_user('cordelia')
        self.assertFalse(user_profile.is_realm_admin)
        result = fetch_initial_state_data(user_profile, None, "")
        self.assert_length(result['realm_bots'], 0)

        # additionally the API key for a random bot is not present in the data
        api_key = self.notification_bot().api_key
        self.assertNotIn(api_key, str(result))

    # Admin users have access to all bots in the realm_bots field
    def test_realm_bots_admin(self):
        # type: () -> None
        user_profile = self.example_user('hamlet')
        do_change_is_admin(user_profile, True)
        self.assertTrue(user_profile.is_realm_admin)
        result = fetch_initial_state_data(user_profile, None, "")
        self.assertTrue(len(result['realm_bots']) > 5)

    def test_max_message_id_with_no_history(self):
        # type: () -> None
        user_profile = self.example_user('aaron')
        # Delete all historical messages for this user
        UserMessage.objects.filter(user_profile=user_profile).delete()
        result = fetch_initial_state_data(user_profile, None, "")
        self.assertEqual(result['max_message_id'], -1)


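# Tests for the in-memory EventQueue container; note these use Django's plain
# TestCase rather than ZulipTestCase.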
class EventQueueTest(TestCase):
    def test_one_event(self):
        # type: () -> None
        queue = EventQueue("1")
        queue.push({"type": "pointer",
                    "pointer": 1,
                    "timestamp": "1"})
        self.assertFalse(queue.empty())
        self.assertEqual(queue.contents(),
                         [{'id': 0,
                           'type': 'pointer',
                           "pointer": 1,
                           "timestamp": "1"}])

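    # Consecutive pointer events collapse down to the most recent one; 'unknown'
    # events are preserved as-is, and 'restart' events collapse to the latest restart.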
    def test_event_collapsing(self):
        # type: () -> None
        queue = EventQueue("1")
        for pointer_val in range(1, 10):
            queue.push({"type": "pointer",
                        "pointer": pointer_val,
                        "timestamp": str(pointer_val)})
        self.assertEqual(queue.contents(),
                         [{'id': 8,
                           'type': 'pointer',
                           "pointer": 9,
                           "timestamp": "9"}])

        queue = EventQueue("2")
        for pointer_val in range(1, 10):
            queue.push({"type": "pointer",
                        "pointer": pointer_val,
                        "timestamp": str(pointer_val)})
        queue.push({"type": "unknown"})
        queue.push({"type": "restart", "server_generation": "1"})
        for pointer_val in range(11, 20):
            queue.push({"type": "pointer",
                        "pointer": pointer_val,
                        "timestamp": str(pointer_val)})
        queue.push({"type": "restart", "server_generation": "2"})
        self.assertEqual(queue.contents(),
                         [{"type": "unknown",
                           "id": 9},
                          {'id': 19,
                           'type': 'pointer',
                           "pointer": 19,
                           "timestamp": "19"},
                          {"id": 20,
                           "type": "restart",
                           "server_generation": "2"}])
        for pointer_val in range(21, 23):
            queue.push({"type": "pointer",
                        "pointer": pointer_val,
                        "timestamp": str(pointer_val)})
        self.assertEqual(queue.contents(),
                         [{"type": "unknown",
                           "id": 9},
                          {'id': 19,
                           'type': 'pointer',
                           "pointer": 19,
                           "timestamp": "19"},
                          {"id": 20,
                           "type": "restart",
                           "server_generation": "2"},
                          {'id': 22,
                           'type': 'pointer',
                           "pointer": 22,
                           "timestamp": "22"},
                          ])

    def test_flag_add_collapsing(self):
        # type: () -> None
        queue = EventQueue("1")
        queue.push({"type": "update_message_flags",
                    "flag": "read",
                    "operation": "add",
                    "all": False,
                    "messages": [1, 2, 3, 4],
                    "timestamp": "1"})
        queue.push({"type": "update_message_flags",
                    "flag": "read",
                    "all": False,
                    "operation": "add",
                    "messages": [5, 6],
                    "timestamp": "1"})
        self.assertEqual(queue.contents(),
                         [{'id': 1,
                           'type': 'update_message_flags',
                           "all": False,
                           "flag": "read",
                           "operation": "add",
                           "messages": [1, 2, 3, 4, 5, 6],
                           "timestamp": "1"}])

    def test_flag_remove_collapsing(self):
        # type: () -> None
        queue = EventQueue("1")
        queue.push({"type": "update_message_flags",
                    "flag": "collapsed",
                    "operation": "remove",
                    "all": False,
                    "messages": [1, 2, 3, 4],
                    "timestamp": "1"})
        queue.push({"type": "update_message_flags",
                    "flag": "collapsed",
                    "all": False,
                    "operation": "remove",
                    "messages": [5, 6],
                    "timestamp": "1"})
        self.assertEqual(queue.contents(),
                         [{'id': 1,
                           'type': 'update_message_flags',
                           "all": False,
                           "flag": "collapsed",
                           "operation": "remove",
                           "messages": [1, 2, 3, 4, 5, 6],
                           "timestamp": "1"}])

    def test_collapse_event(self):
        # type: () -> None
        queue = EventQueue("1")
        queue.push({"type": "pointer",
                    "pointer": 1,
                    "timestamp": "1"})
        queue.push({"type": "unknown",
                    "timestamp": "1"})
        self.assertEqual(queue.contents(),
                         [{'id': 0,
                           'type': 'pointer',
                           "pointer": 1,
                           "timestamp": "1"},
                          {'id': 1,
                           'type': 'unknown',
                           "timestamp": "1"}])


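# Tests for _default_all_public_streams, which uses the value passed to
# /register when one is given and falls back to the user's saved default
# only when the passed value is None.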
class TestEventsRegisterAllPublicStreamsDefaults(ZulipTestCase):
    def setUp(self):
        # type: () -> None
        self.user_profile = self.example_user('hamlet')
        self.email = self.user_profile.email

    def test_use_passed_all_public_true_default_false(self):
        # type: () -> None
        self.user_profile.default_all_public_streams = False
        self.user_profile.save()
        result = _default_all_public_streams(self.user_profile, True)
        self.assertTrue(result)

    def test_use_passed_all_public_true_default(self):
        # type: () -> None
        self.user_profile.default_all_public_streams = True
        self.user_profile.save()
        result = _default_all_public_streams(self.user_profile, True)
        self.assertTrue(result)

    def test_use_passed_all_public_false_default_false(self):
        # type: () -> None
        self.user_profile.default_all_public_streams = False
        self.user_profile.save()
        result = _default_all_public_streams(self.user_profile, False)
        self.assertFalse(result)

    def test_use_passed_all_public_false_default_true(self):
        # type: () -> None
        self.user_profile.default_all_public_streams = True
        self.user_profile.save()
        result = _default_all_public_streams(self.user_profile, False)
        self.assertFalse(result)

    def test_use_true_default_for_none(self):
        # type: () -> None
        self.user_profile.default_all_public_streams = True
        self.user_profile.save()
        result = _default_all_public_streams(self.user_profile, None)
        self.assertTrue(result)

    def test_use_false_default_for_none(self):
        # type: () -> None
        self.user_profile.default_all_public_streams = False
        self.user_profile.save()
        result = _default_all_public_streams(self.user_profile, None)
        self.assertFalse(result)


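# Tests for _default_narrow, which falls back to the user's default events
# register stream only when the passed narrow is empty.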
class TestEventsRegisterNarrowDefaults(ZulipTestCase):
    def setUp(self):
        # type: () -> None
        self.user_profile = self.example_user('hamlet')
        self.email = self.user_profile.email
        self.stream = get_stream('Verona', self.user_profile.realm)

    def test_use_passed_narrow_no_default(self):
        # type: () -> None
        self.user_profile.default_events_register_stream_id = None
        self.user_profile.save()
        result = _default_narrow(self.user_profile, [[u'stream', u'my_stream']])
        self.assertEqual(result, [[u'stream', u'my_stream']])

    def test_use_passed_narrow_with_default(self):
        # type: () -> None
        self.user_profile.default_events_register_stream_id = self.stream.id
        self.user_profile.save()
        result = _default_narrow(self.user_profile, [[u'stream', u'my_stream']])
        self.assertEqual(result, [[u'stream', u'my_stream']])

    def test_use_default_if_narrow_is_empty(self):
        # type: () -> None
        self.user_profile.default_events_register_stream_id = self.stream.id
        self.user_profile.save()
        result = _default_narrow(self.user_profile, [])
        self.assertEqual(result, [[u'stream', u'Verona']])

    def test_use_narrow_if_default_is_none(self):
        # type: () -> None
        self.user_profile.default_events_register_stream_id = None
        self.user_profile.save()
        result = _default_narrow(self.user_profile, [])
        self.assertEqual(result, [])