from contextlib import contextmanager
from typing import (cast, Any, Callable, Dict, Iterable, Iterator, List, Mapping, Optional,
                    Sized, Tuple, Union, Text)

from django.core.urlresolvers import resolve
from django.conf import settings
from django.test import TestCase
from django.test.client import (
    BOUNDARY, MULTIPART_CONTENT, encode_multipart,
)
from django.template import loader
from django.test.testcases import SerializeMixin
from django.http import HttpResponse
from django.db.utils import IntegrityError

from zerver.lib.initial_password import initial_password
from zerver.lib.db import TimeTrackingCursor
from zerver.lib.str_utils import force_text
from zerver.lib.utils import is_remote_server
from zerver.lib import cache
from zerver.tornado.handlers import allocate_handler_id
from zerver.worker import queue_processors

from zerver.lib.actions import (
    check_send_message, create_stream_if_needed, bulk_add_subscriptions,
    get_display_recipient, bulk_remove_subscriptions
)

from zerver.lib.test_helpers import (
    instrument_url, find_key_by_email,
)

from zerver.models import (
    get_stream,
    get_user,
    get_user_profile_by_email,
    get_realm,
    Client,
    Message,
    Realm,
    Recipient,
    Stream,
    Subscription,
    UserMessage,
    UserProfile,
)

from zerver.lib.request import JsonableError
from zilencer.models import get_remote_server_by_uuid

import base64
import mock
import os
import re
import time
import ujson
import unittest
from six.moves import urllib
from six import binary_type
from zerver.lib.str_utils import NonBinaryStr

API_KEYS = {}  # type: Dict[Text, Text]


def flush_caches_for_testing():
    # type: () -> None
    global API_KEYS
    API_KEYS = {}


class UploadSerializeMixin(SerializeMixin):
    """
    We cannot use override_settings to change the upload directory because
    settings.LOCAL_UPLOADS_DIR is used in a url pattern and urls are
    compiled only once.  Otherwise, using a different upload directory for
    conflicting test cases would have provided better performance while
    providing the required isolation.
    """
    lockfile = 'var/upload_lock'

    @classmethod
    def setUpClass(cls, *args, **kwargs):
        # type: (*Any, **Any) -> None
        if not os.path.exists(cls.lockfile):
            with open(cls.lockfile, 'w'):  # nocoverage - rare locking case
                pass

        super(UploadSerializeMixin, cls).setUpClass(*args, **kwargs)


class ZulipTestCase(TestCase):
    # Ensure that the test system just shows us diffs
    maxDiff = None  # type: Optional[int]

    '''
    WRAPPER_COMMENT:

    We wrap calls to self.client.{patch,put,get,post,delete} for various
    reasons.  Some of this has to do with fixing encodings before calling
    into the Django code.  Some of this has to do with providing a future
    path for instrumentation.  Some of it is just for consistency.

    The linter will prevent direct calls to self.client.foo, so the wrapper
    functions have to fake out the linter by using a local variable called
    django_client to fool the regex.
    '''
    DEFAULT_SUBDOMAIN = "zulip"
    DEFAULT_REALM = Realm.objects.get(string_id='zulip')

    def set_http_host(self, kwargs):
        # type: (Dict[str, Any]) -> None
        if 'subdomain' in kwargs:
            if kwargs['subdomain'] != "":
                kwargs["HTTP_HOST"] = "%s.%s" % (kwargs["subdomain"], settings.EXTERNAL_HOST)
            else:
                kwargs["HTTP_HOST"] = settings.EXTERNAL_HOST
            del kwargs['subdomain']
        elif 'HTTP_HOST' not in kwargs:
            if self.DEFAULT_SUBDOMAIN == "":
                kwargs["HTTP_HOST"] = settings.EXTERNAL_HOST
            else:
                kwargs["HTTP_HOST"] = "%s.%s" % (self.DEFAULT_SUBDOMAIN,
                                                 settings.EXTERNAL_HOST,)

    @instrument_url
    def client_patch(self, url, info={}, **kwargs):
        # type: (Text, Dict[str, Any], **Any) -> HttpResponse
        """
        We need to urlencode, since Django's function won't do it for us.
        """
        encoded = urllib.parse.urlencode(info)
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_host(kwargs)
        return django_client.patch(url, encoded, **kwargs)

    @instrument_url
    def client_patch_multipart(self, url, info={}, **kwargs):
        # type: (Text, Dict[str, Any], **Any) -> HttpResponse
        """
        Use this for patch requests that have file uploads or
        that need some sort of multi-part content.  In the future
        Django's test client may become a bit more flexible,
        so we can hopefully eliminate this.  (When you post
        with the Django test client, it deals with MULTIPART_CONTENT
        automatically, but not patch.)
        """
        encoded = encode_multipart(BOUNDARY, info)
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_host(kwargs)
        return django_client.patch(
            url,
            encoded,
            content_type=MULTIPART_CONTENT,
            **kwargs)

    @instrument_url
    def client_put(self, url, info={}, **kwargs):
        # type: (Text, Dict[str, Any], **Any) -> HttpResponse
        encoded = urllib.parse.urlencode(info)
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_host(kwargs)
        return django_client.put(url, encoded, **kwargs)

    @instrument_url
    def client_delete(self, url, info={}, **kwargs):
        # type: (Text, Dict[str, Any], **Any) -> HttpResponse
        encoded = urllib.parse.urlencode(info)
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_host(kwargs)
        return django_client.delete(url, encoded, **kwargs)

    @instrument_url
    def client_options(self, url, info={}, **kwargs):
        # type: (Text, Dict[str, Any], **Any) -> HttpResponse
        encoded = urllib.parse.urlencode(info)
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_host(kwargs)
        return django_client.options(url, encoded, **kwargs)

    @instrument_url
    def client_head(self, url, info={}, **kwargs):
        # type: (Text, Dict[str, Any], **Any) -> HttpResponse
        encoded = urllib.parse.urlencode(info)
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_host(kwargs)
        return django_client.head(url, encoded, **kwargs)

    @instrument_url
    def client_post(self, url, info={}, **kwargs):
        # type: (Text, Dict[str, Any], **Any) -> HttpResponse
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_host(kwargs)
        return django_client.post(url, info, **kwargs)

    @instrument_url
    def client_post_request(self, url, req):
        # type: (Text, Any) -> HttpResponse
        """
        We simulate hitting an endpoint here, although we
        actually resolve the URL manually and hit the view
        directly.  We have this helper method to allow our
        instrumentation to work for /notify_tornado and
        future similar methods that require doing funny
        things to a request object.
        """
        match = resolve(url)
        return match.func(req)

    @instrument_url
    def client_get(self, url, info={}, **kwargs):
        # type: (Text, Dict[str, Any], **Any) -> HttpResponse
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_host(kwargs)
        return django_client.get(url, info, **kwargs)
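
    # A minimal usage sketch (not executed at import time): inside a
    # ZulipTestCase test you call these wrappers instead of self.client.*
    # directly, and can target a realm via the `subdomain` kwarg, which
    # set_http_host() translates into HTTP_HOST.  The payload below is
    # illustrative only:
    #
    #     result = self.client_post('/accounts/login/',
    #                               {'username': 'hamlet@zulip.com',
    #                                'password': 'secret'},
    #                               subdomain='zulip')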

    example_user_map = dict(
        hamlet=u'hamlet@zulip.com',
        cordelia=u'cordelia@zulip.com',
        iago=u'iago@zulip.com',
        prospero=u'prospero@zulip.com',
        othello=u'othello@zulip.com',
        AARON=u'AARON@zulip.com',
        aaron=u'aaron@zulip.com',
        ZOE=u'ZOE@zulip.com',
        webhook_bot=u'webhook-bot@zulip.com',
        outgoing_webhook_bot=u'outgoing-webhook@zulip.com'
    )

    mit_user_map = dict(
        sipbtest=u"sipbtest@mit.edu",
        starnine=u"starnine@mit.edu",
        espuser=u"espuser@mit.edu",
    )

    # Non-registered test users
    nonreg_user_map = dict(
        test=u'test@zulip.com',
        test1=u'test1@zulip.com',
        alice=u'alice@zulip.com',
        newuser=u'newuser@zulip.com',
        bob=u'bob@zulip.com',
        cordelia=u'cordelia@zulip.com',
        newguy=u'newguy@zulip.com',
        me=u'me@zulip.com',
    )

    def nonreg_user(self, name):
        # type: (str) -> UserProfile
        email = self.nonreg_user_map[name]
        return get_user(email, get_realm("zulip"))

    def example_user(self, name):
        # type: (str) -> UserProfile
        email = self.example_user_map[name]
        return get_user(email, get_realm('zulip'))

    def mit_user(self, name):
        # type: (str) -> UserProfile
        email = self.mit_user_map[name]
        return get_user(email, get_realm('zephyr'))

    def nonreg_email(self, name):
        # type: (str) -> Text
        return self.nonreg_user_map[name]

    def example_email(self, name):
        # type: (str) -> Text
        return self.example_user_map[name]

    def mit_email(self, name):
        # type: (str) -> Text
        return self.mit_user_map[name]

    def notification_bot(self):
        # type: () -> UserProfile
        return get_user('notification-bot@zulip.com', get_realm('zulip'))
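
    # A brief sketch of how tests typically use these accessors (illustrative,
    # not executed here):
    #
    #     hamlet = self.example_user('hamlet')    # a UserProfile object
    #     email = self.example_email('cordelia')  # u'cordelia@zulip.com'
    #     self.login(email)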

    def login_with_return(self, email, password=None, **kwargs):
        # type: (Text, Optional[Text], **Any) -> HttpResponse
        if password is None:
            password = initial_password(email)
        return self.client_post('/accounts/login/',
                                {'username': email, 'password': password},
                                **kwargs)

    def login(self, email, password=None, fails=False):
        # type: (Text, Optional[Text], bool) -> HttpResponse
        if password is None:
            password = initial_password(email)
        if not fails:
            self.assertTrue(self.client.login(username=email, password=password))
        else:
            self.assertFalse(self.client.login(username=email, password=password))

    def logout(self):
        # type: () -> None
        self.client.logout()

    def register(self, email, password, **kwargs):
        # type: (Text, Text, **Any) -> HttpResponse
        self.client_post('/accounts/home/', {'email': email},
                         **kwargs)
        return self.submit_reg_form_for_user(email, password, **kwargs)

    def submit_reg_form_for_user(self, email, password, realm_name="Zulip Test",
                                 realm_subdomain="zuliptest",
                                 from_confirmation='', full_name=None, timezone=u'', **kwargs):
        # type: (Text, Text, Optional[Text], Optional[Text], Optional[Text], Optional[Text], Optional[Text], **Any) -> HttpResponse
        """
        Stage two of the two-step registration process.

        If things are working correctly the account should be fully
        registered after this call.

        You can pass the HTTP_HOST variable for subdomains via kwargs.
        """
        if full_name is None:
            full_name = email.replace("@", "_")
        return self.client_post('/accounts/register/',
                                {'full_name': full_name,
                                 'password': password,
                                 'realm_name': realm_name,
                                 'realm_subdomain': realm_subdomain,
                                 'key': find_key_by_email(email),
                                 'timezone': timezone,
                                 'terms': True,
                                 'from_confirmation': from_confirmation},
                                **kwargs)

    def get_confirmation_url_from_outbox(self, email_address, path_pattern="(\S+)>"):
        # type: (Text, Text) -> Text
        from django.core.mail import outbox
        for message in reversed(outbox):
            if email_address in message.to:
                return re.search(settings.EXTERNAL_HOST + path_pattern,
                                 message.body).groups()[0]
        else:
            raise AssertionError("Couldn't find a confirmation email.")
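
    # Typical flows, sketched for illustration (the emails come from the
    # maps above; nothing here is executed at import time):
    #
    #     self.login(self.example_email('hamlet'))
    #     self.logout()
    #     self.register(self.nonreg_email('newuser'), 'password')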
2017-04-27 11:41:27 +02:00
|
|
|
def api_auth(self, identifier):
|
2016-12-21 13:17:53 +01:00
|
|
|
# type: (Text) -> Dict[str, Text]
|
2017-04-27 11:41:27 +02:00
|
|
|
"""
|
|
|
|
identifier: Can be an email or a remote server uuid.
|
|
|
|
"""
|
2017-08-25 08:33:00 +02:00
|
|
|
if identifier in API_KEYS:
|
|
|
|
api_key = API_KEYS[identifier]
|
2016-10-27 23:55:31 +02:00
|
|
|
else:
|
2017-08-25 08:33:00 +02:00
|
|
|
if is_remote_server(identifier):
|
|
|
|
api_key = get_remote_server_by_uuid(identifier).api_key
|
|
|
|
else:
|
|
|
|
api_key = get_user_profile_by_email(identifier).api_key
|
|
|
|
API_KEYS[identifier] = api_key
|
2016-10-27 23:55:31 +02:00
|
|
|
|
2017-04-27 11:41:27 +02:00
|
|
|
credentials = u"%s:%s" % (identifier, api_key)
|
2016-11-10 19:30:09 +01:00
|
|
|
return {
|
|
|
|
'HTTP_AUTHORIZATION': u'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
|
|
|
|
}
|
|
|
|
|
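
    # api_auth() returns extra HTTP headers (as Django test-client kwargs)
    # for API-style requests.  A hedged sketch of how it is typically spliced
    # into a request (the payload is illustrative):
    #
    #     result = self.client_post("/api/v1/users/me/subscriptions",
    #                               {'subscriptions': ujson.dumps([{"name": "Denmark"}])},
    #                               **self.api_auth(self.example_email('hamlet')))
    #     self.assert_json_success(result)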

    def get_streams(self, email, realm):
        # type: (Text, Realm) -> List[Text]
        """
        Helper function to get the stream names for a user
        """
        user_profile = get_user(email, realm)
        subs = Subscription.objects.filter(
            user_profile=user_profile,
            active=True,
            recipient__type=Recipient.STREAM)
        return [cast(Text, get_display_recipient(sub.recipient)) for sub in subs]

    def send_message(self, sender_name, raw_recipients, message_type,
                     content=u"test content", subject=u"test", **kwargs):
        # type: (Text, Union[Text, List[Text]], int, Text, Text, **Any) -> int
        sender = get_user_profile_by_email(sender_name)
        if message_type in [Recipient.PERSONAL, Recipient.HUDDLE]:
            message_type_name = "private"
        elif message_type == Recipient.STREAM:
            message_type_name = "stream"
        else:
            raise AssertionError("message_type should be a Recipient type enum "
                                 "(STREAM, PERSONAL, or HUDDLE)")
        if isinstance(raw_recipients, str):
            recipient_list = [raw_recipients]
        else:
            recipient_list = raw_recipients
        (sending_client, _) = Client.objects.get_or_create(name="test suite")

        return check_send_message(
            sender, sending_client, message_type_name, recipient_list, subject,
            content, forged=False, forged_timestamp=None,
            forwarder_user_profile=sender, realm=sender.realm, **kwargs)
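
    # Illustrative calls (not run at import time); Recipient.STREAM and
    # Recipient.PERSONAL are the enum values branched on above:
    #
    #     self.send_message(self.example_email('hamlet'), "Denmark",
    #                       Recipient.STREAM, content=u"hello")
    #     self.send_message(self.example_email('hamlet'),
    #                       [self.example_email('cordelia')],
    #                       Recipient.PERSONAL)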

    def get_messages(self, anchor=1, num_before=100, num_after=100,
                     use_first_unread_anchor=False):
        # type: (int, int, int, bool) -> List[Dict[str, Any]]
        post_params = {"anchor": anchor, "num_before": num_before,
                       "num_after": num_after,
                       "use_first_unread_anchor": ujson.dumps(use_first_unread_anchor)}
        result = self.client_get("/json/messages", dict(post_params))
        data = result.json()
        return data['messages']

    def users_subscribed_to_stream(self, stream_name, realm):
        # type: (Text, Realm) -> List[UserProfile]
        stream = Stream.objects.get(name=stream_name, realm=realm)
        recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
        subscriptions = Subscription.objects.filter(recipient=recipient, active=True)

        return [subscription.user_profile for subscription in subscriptions]

    def assert_url_serves_contents_of_file(self, url, result):
        # type: (str, bytes) -> None
        response = self.client_get(url)
        data = b"".join(response.streaming_content)
        self.assertEqual(result, data)

    def assert_json_success(self, result):
        # type: (HttpResponse) -> Dict[str, Any]
        """
        Successful POSTs return a 200 and JSON of the form {"result": "success",
        "msg": ""}.
        """
        try:
            json = ujson.loads(result.content)
        except Exception:  # nocoverage
            json = {'msg': "Error parsing JSON in response!"}
        self.assertEqual(result.status_code, 200, json['msg'])
        self.assertEqual(json.get("result"), "success")
        # We have a msg key for consistency with errors, but it typically has an
        # empty value.
        self.assertIn("msg", json)
        self.assertNotEqual(json["msg"], "Error parsing JSON in response!")
        return json

    def get_json_error(self, result, status_code=400):
        # type: (HttpResponse, int) -> Dict[str, Any]
        try:
            json = ujson.loads(result.content)
        except Exception:  # nocoverage
            json = {'msg': "Error parsing JSON in response!"}
        self.assertEqual(result.status_code, status_code, msg=json.get('msg'))
        self.assertEqual(json.get("result"), "error")
        return json['msg']

    def assert_json_error(self, result, msg, status_code=400):
        # type: (HttpResponse, Text, int) -> None
        """
        Invalid POSTs return an error status code and JSON of the form
        {"result": "error", "msg": "reason"}.
        """
        self.assertEqual(self.get_json_error(result, status_code=status_code), msg)

    def assert_length(self, queries, count):
        # type: (Sized, int) -> None
        actual_count = len(queries)
        return self.assertTrue(actual_count == count,
                               "len(%s) == %s, != %s" % (queries, actual_count, count))

    def assert_json_error_contains(self, result, msg_substring, status_code=400):
        # type: (HttpResponse, Text, int) -> None
        self.assertIn(msg_substring, self.get_json_error(result, status_code=status_code))
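
    # The usual assertion pattern in endpoint tests, sketched here for
    # reference (the request itself is elided):
    #
    #     result = self.client_get('/json/messages', {...})
    #     self.assert_json_success(result)
    #
    #     # and for a request that is expected to fail:
    #     self.assert_json_error(result, 'some expected error message', status_code=400)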

    def assert_in_response(self, substring, response):
        # type: (Text, HttpResponse) -> None
        self.assertIn(substring, response.content.decode('utf-8'))

    def assert_in_success_response(self, substrings, response):
        # type: (List[Text], HttpResponse) -> None
        self.assertEqual(response.status_code, 200)
        decoded = response.content.decode('utf-8')
        for substring in substrings:
            self.assertIn(substring, decoded)

    def assert_not_in_success_response(self, substrings, response):
        # type: (List[Text], HttpResponse) -> None
        self.assertEqual(response.status_code, 200)
        decoded = response.content.decode('utf-8')
        for substring in substrings:
            self.assertNotIn(substring, decoded)

    def fixture_data(self, type, action, file_type='json'):
        # type: (Text, Text, Text) -> Text
        return force_text(open(os.path.join(os.path.dirname(__file__),
                                            "../webhooks/%s/fixtures/%s.%s" % (type, action, file_type))).read())

    def make_stream(self, stream_name, realm=None, invite_only=False):
        # type: (Text, Optional[Realm], Optional[bool]) -> Stream
        if realm is None:
            realm = self.DEFAULT_REALM

        try:
            stream = Stream.objects.create(
                realm=realm,
                name=stream_name,
                invite_only=invite_only,
            )
        except IntegrityError:  # nocoverage -- this is for bugs in the tests
            raise Exception('''
                %s already exists

                Please call make_stream with a stream name
                that is not already in use.''' % (stream_name,))

        Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
        return stream

    # Subscribe to a stream directly
    def subscribe(self, user_profile, stream_name):
        # type: (UserProfile, Text) -> Stream
        try:
            stream = get_stream(stream_name, user_profile.realm)
            from_stream_creation = False
        except Stream.DoesNotExist:
            stream, from_stream_creation = create_stream_if_needed(user_profile.realm, stream_name)
        bulk_add_subscriptions([stream], [user_profile], from_stream_creation=from_stream_creation)
        return stream

    def unsubscribe(self, user_profile, stream_name):
        # type: (UserProfile, Text) -> None
        stream = get_stream(stream_name, user_profile.realm)
        bulk_remove_subscriptions([user_profile], [stream])

    # Subscribe to a stream by making an API request
    def common_subscribe_to_streams(self, email, streams, extra_post_data={}, invite_only=False,
                                    **kwargs):
        # type: (Text, Iterable[Text], Dict[str, Any], bool, **Any) -> HttpResponse
        post_data = {'subscriptions': ujson.dumps([{"name": stream} for stream in streams]),
                     'invite_only': ujson.dumps(invite_only)}
        post_data.update(extra_post_data)
        kw = kwargs.copy()
        kw.update(self.api_auth(email))
        result = self.client_post("/api/v1/users/me/subscriptions", post_data,
                                  **kw)
        return result
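
    # Illustrative usage (a sketch, not executed at import time):
    #
    #     self.subscribe(self.example_user('hamlet'), 'Denmark')
    #     result = self.common_subscribe_to_streams(self.example_email('iago'),
    #                                               ['Denmark', 'Scotland'],
    #                                               invite_only=False)
    #     self.assert_json_success(result)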

    def send_json_payload(self, user_profile, url, payload, stream_name=None, **post_params):
        # type: (UserProfile, Text, Union[Text, Dict[str, Any]], Optional[Text], **Any) -> Message
        if stream_name is not None:
            self.subscribe(user_profile, stream_name)

        result = self.client_post(url, payload, **post_params)
        self.assert_json_success(result)

        # Check the correct message was sent
        msg = self.get_last_message()
        self.assertEqual(msg.sender.email, user_profile.email)
        if stream_name is not None:
            self.assertEqual(get_display_recipient(msg.recipient), stream_name)
        # TODO: should also validate recipient for private messages

        return msg

    def get_last_message(self):
        # type: () -> Message
        return Message.objects.latest('id')

    def get_second_to_last_message(self):
        # type: () -> Message
        return Message.objects.all().order_by('-id')[1]

    @contextmanager
    def simulated_markdown_failure(self):
        # type: () -> Iterator[None]
        '''
        This raises a failure inside of the try/except block of
        bugdown.__init__.do_convert.
        '''
        with \
                self.settings(ERROR_BOT=None), \
                mock.patch('zerver.lib.bugdown.timeout', side_effect=KeyError('foo')), \
                mock.patch('zerver.lib.bugdown.log_bugdown_error'):
            yield
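
    # Typical use, sketched for illustration: wrap whatever triggers markdown
    # rendering, then assert on the resulting error handling.
    #
    #     with self.simulated_markdown_failure():
    #         ...  # code that renders a message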


class WebhookTestCase(ZulipTestCase):
    """
    Shared base class for all webhook tests.

    Override the class attributes below and call send_and_test_stream_message
    (or send_and_test_private_message) from your tests.  If your URL is built
    in an uncommon way, override the build_webhook_url method.  If you need to
    modify the body, or to build it without using a fixture, override the
    get_body method as well.  (A commented example subclass is sketched at the
    end of this file.)
    """
    STREAM_NAME = None  # type: Optional[Text]
    TEST_USER_EMAIL = 'webhook-bot@zulip.com'
    URL_TEMPLATE = None  # type: Optional[Text]
    FIXTURE_DIR_NAME = None  # type: Optional[Text]

    @property
    def test_user(self):
        # type: () -> UserProfile
        return get_user(self.TEST_USER_EMAIL, get_realm("zulip"))

    def setUp(self):
        # type: () -> None
        self.url = self.build_webhook_url()

    def send_and_test_stream_message(self, fixture_name, expected_subject=None,
                                     expected_message=None, content_type="application/json", **kwargs):
        # type: (Text, Optional[Text], Optional[Text], Optional[Text], **Any) -> Message
        payload = self.get_body(fixture_name)
        if content_type is not None:
            kwargs['content_type'] = content_type
        msg = self.send_json_payload(self.test_user, self.url, payload,
                                     self.STREAM_NAME, **kwargs)
        self.do_test_subject(msg, expected_subject)
        self.do_test_message(msg, expected_message)

        return msg

    def send_and_test_private_message(self, fixture_name, expected_subject=None,
                                      expected_message=None, content_type="application/json", **kwargs):
        # type: (Text, Text, Text, str, **Any) -> Message
        payload = self.get_body(fixture_name)
        if content_type is not None:
            kwargs['content_type'] = content_type

        msg = self.send_json_payload(self.test_user, self.url, payload,
                                     stream_name=None, **kwargs)
        self.do_test_message(msg, expected_message)

        return msg

    def build_webhook_url(self, *args, **kwargs):
        # type: (*Any, **Any) -> Text
        url = self.URL_TEMPLATE
        if url.find("api_key") >= 0:
            api_key = self.test_user.api_key
            url = self.URL_TEMPLATE.format(api_key=api_key,
                                           stream=self.STREAM_NAME)
        else:
            url = self.URL_TEMPLATE.format(stream=self.STREAM_NAME)

        has_arguments = kwargs or args
        if has_arguments and url.find('?') == -1:
            url = "{}?".format(url)
        else:
            url = "{}&".format(url)

        for key, value in kwargs.items():
            url = "{}{}={}&".format(url, key, value)

        for arg in args:
            url = "{}{}&".format(url, arg)

        return url[:-1] if has_arguments else url
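
    # For instance (a sketch; the template is hypothetical), with
    #
    #     URL_TEMPLATE = "/api/v1/external/example?api_key={api_key}&stream={stream}"
    #     STREAM_NAME = 'example'
    #
    # build_webhook_url('topic=foo') substitutes the test bot's api_key and
    # the stream name into the template and appends the extra positional
    # argument as a query parameter.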

    def get_body(self, fixture_name):
        # type: (Text) -> Union[Text, Dict[str, Text]]
        """Can be implemented either as returning a dictionary containing the
        post parameters or as a string containing the body of the request."""
        return ujson.dumps(ujson.loads(self.fixture_data(self.FIXTURE_DIR_NAME, fixture_name)))

    def do_test_subject(self, msg, expected_subject):
        # type: (Message, Optional[Text]) -> None
        if expected_subject is not None:
            self.assertEqual(msg.topic_name(), expected_subject)

    def do_test_message(self, msg, expected_message):
        # type: (Message, Optional[Text]) -> None
        if expected_message is not None:
            self.assertEqual(msg.content, expected_message)