2016-11-10 19:30:09 +01:00
|
|
|
from contextlib import contextmanager
|
2019-06-29 04:41:13 +02:00
|
|
|
from email.utils import parseaddr
|
2019-10-16 18:01:38 +02:00
|
|
|
from fakeldap import MockLDAP
|
2019-02-02 23:53:55 +01:00
|
|
|
from typing import (cast, Any, Dict, Iterable, Iterator, List, Optional,
|
2019-05-21 12:21:32 +02:00
|
|
|
Tuple, Union, Set)
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2018-04-09 18:19:55 +02:00
|
|
|
from django.apps import apps
|
|
|
|
from django.db.migrations.state import StateApps
|
2018-01-30 06:05:25 +01:00
|
|
|
from django.urls import resolve
|
2016-11-10 19:30:09 +01:00
|
|
|
from django.conf import settings
|
|
|
|
from django.test import TestCase
|
|
|
|
from django.test.client import (
|
|
|
|
BOUNDARY, MULTIPART_CONTENT, encode_multipart,
|
|
|
|
)
|
2017-02-16 10:10:37 +01:00
|
|
|
from django.test.testcases import SerializeMixin
|
2016-11-10 19:30:09 +01:00
|
|
|
from django.http import HttpResponse
|
2018-04-09 18:19:55 +02:00
|
|
|
from django.db.migrations.executor import MigrationExecutor
|
|
|
|
from django.db import connection
|
2016-11-10 19:30:09 +01:00
|
|
|
from django.db.utils import IntegrityError
|
2017-07-13 13:42:57 +02:00
|
|
|
from django.http import HttpRequest
|
2019-05-06 00:59:02 +02:00
|
|
|
from django.utils import translation
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2017-07-13 13:42:57 +02:00
|
|
|
from two_factor.models import PhoneDevice
|
2016-11-10 19:30:09 +01:00
|
|
|
from zerver.lib.initial_password import initial_password
|
2017-04-28 06:56:44 +02:00
|
|
|
from zerver.lib.utils import is_remote_server
|
2018-08-01 10:53:40 +02:00
|
|
|
from zerver.lib.users import get_api_key
|
2019-05-26 22:12:46 +02:00
|
|
|
from zerver.lib.sessions import get_session_dict_user
|
2019-06-21 04:41:30 +02:00
|
|
|
from zerver.lib.webhooks.common import get_fixture_http_headers, standardize_headers
|
2016-11-10 19:30:09 +01:00
|
|
|
|
|
|
|
from zerver.lib.actions import (
|
|
|
|
check_send_message, create_stream_if_needed, bulk_add_subscriptions,
|
2019-02-02 23:53:55 +01:00
|
|
|
bulk_remove_subscriptions,
|
2018-05-03 00:07:08 +02:00
|
|
|
check_send_stream_message, gather_subscriptions,
|
|
|
|
get_default_value_for_history_public_to_subscribers,
|
2016-11-10 19:30:09 +01:00
|
|
|
)
|
|
|
|
|
2017-10-29 17:11:11 +01:00
|
|
|
from zerver.lib.stream_subscription import (
|
|
|
|
get_stream_subscriptions_for_user,
|
|
|
|
)
|
|
|
|
|
2016-11-10 19:30:09 +01:00
|
|
|
from zerver.lib.test_helpers import (
|
|
|
|
instrument_url, find_key_by_email,
|
|
|
|
)
|
|
|
|
|
|
|
|
from zerver.models import (
|
2019-03-17 22:19:53 +01:00
|
|
|
clear_supported_auth_backends_cache,
|
2019-05-03 22:52:56 +02:00
|
|
|
flush_per_request_caches,
|
2016-11-10 19:30:09 +01:00
|
|
|
get_stream,
|
2018-03-14 00:13:21 +01:00
|
|
|
get_client,
|
2019-02-02 23:53:55 +01:00
|
|
|
get_display_recipient,
|
2017-11-26 01:45:15 +01:00
|
|
|
get_user,
|
2017-07-12 11:56:10 +02:00
|
|
|
get_realm,
|
2019-07-15 20:58:06 +02:00
|
|
|
get_system_bot,
|
2016-11-10 19:30:09 +01:00
|
|
|
Client,
|
|
|
|
Message,
|
|
|
|
Realm,
|
|
|
|
Recipient,
|
|
|
|
Stream,
|
|
|
|
Subscription,
|
|
|
|
UserProfile,
|
2019-09-18 15:04:17 +02:00
|
|
|
get_realm_stream
|
2016-11-10 19:30:09 +01:00
|
|
|
)
|
2016-10-27 23:55:31 +02:00
|
|
|
from zilencer.models import get_remote_server_by_uuid
|
2017-07-13 13:42:57 +02:00
|
|
|
from zerver.decorator import do_two_factor_login
|
2018-08-10 22:43:58 +02:00
|
|
|
from zerver.tornado.event_queue import clear_client_event_queues_for_testing
|
2016-11-10 19:30:09 +01:00
|
|
|
|
|
|
|
import base64
|
|
|
|
import mock
|
|
|
|
import os
|
|
|
|
import re
|
|
|
|
import ujson
|
2017-11-05 05:30:31 +01:00
|
|
|
import urllib
|
2019-04-04 12:03:54 +02:00
|
|
|
import shutil
|
2019-05-03 22:16:07 +02:00
|
|
|
import tempfile
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
API_KEYS = {} # type: Dict[str, str]
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2017-11-27 05:27:04 +01:00
|
|
|
def flush_caches_for_testing() -> None:
    """Reset the module-level API key cache between test runs."""
    # Rebind (rather than mutate) so stale entries are dropped wholesale.
    global API_KEYS
    API_KEYS = {}
|
|
|
|
|
2017-02-16 10:10:37 +01:00
|
|
|
class UploadSerializeMixin(SerializeMixin):
    """
    We cannot use override_settings to change upload directory because
    because settings.LOCAL_UPLOADS_DIR is used in url pattern and urls
    are compiled only once. Otherwise using a different upload directory
    for conflicting test cases would have provided better performance
    while providing the required isolation.
    """
    # Lock file used by SerializeMixin to serialize upload-related test classes.
    lockfile = 'var/upload_lock'

    @classmethod
    def setUpClass(cls: Any, *args: Any, **kwargs: Any) -> None:
        # SerializeMixin expects the lock file to exist; create it on first use.
        if not os.path.exists(cls.lockfile):
            with open(cls.lockfile, 'w'):  # nocoverage - rare locking case
                pass
        super(UploadSerializeMixin, cls).setUpClass(*args, **kwargs)
|
|
|
|
|
2016-11-10 19:30:09 +01:00
|
|
|
class ZulipTestCase(TestCase):
|
2017-03-21 15:34:16 +01:00
|
|
|
# Ensure that the test system just shows us diffs
|
2017-05-24 04:21:29 +02:00
|
|
|
maxDiff = None # type: Optional[int]
|
2017-03-21 15:34:16 +01:00
|
|
|
|
2018-08-10 22:43:58 +02:00
|
|
|
def tearDown(self) -> None:
    """Reset process-global test state so one test cannot leak into the next."""
    super().tearDown()
    # Important: we need to clear event queues to avoid leaking data to future tests.
    clear_client_event_queues_for_testing()
    # Auth backends and per-request caches are cached globally; reset both.
    clear_supported_auth_backends_cache()
    flush_per_request_caches()
    # Restore the default language in case a test activated a translation.
    translation.activate(settings.LANGUAGE_CODE)

    # Clean up after using fakeldap in ldap tests:
    if hasattr(self, 'mock_ldap') and hasattr(self, 'mock_initialize'):
        if self.mock_ldap is not None:
            self.mock_ldap.reset()
        self.mock_initialize.stop()
|
|
|
|
|
2016-11-10 19:30:09 +01:00
|
|
|
'''
WRAPPER_COMMENT:

We wrap calls to self.client.{patch,put,get,post,delete} for various
reasons. Some of this has to do with fixing encodings before calling
into the Django code. Some of this has to do with providing a future
path for instrumentation. Some of it's just consistency.

The linter will prevent direct calls to self.client.foo, so the wrapper
functions have to fake out the linter by using a local variable called
django_client to fool the regex.
'''
# Subdomain used by set_http_host when the caller specifies none.
DEFAULT_SUBDOMAIN = "zulip"
# NOTE(review): evaluated once at class-definition time; assumes the
# 'zulip' realm exists when this module is imported.
DEFAULT_REALM = Realm.objects.get(string_id='zulip')
# Regex matching tokenized noreply addresses generated during tests.
TOKENIZED_NOREPLY_REGEX = settings.TOKENIZED_NOREPLY_EMAIL_ADDRESS.format(token="[a-z0-9_]{24}")
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2017-11-27 05:27:04 +01:00
|
|
|
def set_http_host(self, kwargs: Dict[str, Any]) -> None:
    """Fill in kwargs['HTTP_HOST'] from a 'subdomain' kwarg or the default."""
    if 'subdomain' in kwargs:
        # An explicit subdomain wins; pop it so the Django test client
        # never sees this pseudo-kwarg.
        kwargs['HTTP_HOST'] = Realm.host_for_subdomain(kwargs.pop('subdomain'))
    elif 'HTTP_HOST' not in kwargs:
        # Nothing specified: fall back to the default subdomain's host.
        kwargs['HTTP_HOST'] = Realm.host_for_subdomain(self.DEFAULT_SUBDOMAIN)
|
2017-08-26 00:02:02 +02:00
|
|
|
|
2016-11-10 19:30:09 +01:00
|
|
|
@instrument_url
def client_patch(self, url: str, info: Optional[Dict[str, Any]]=None, **kwargs: Any) -> HttpResponse:
    """
    We need to urlencode, since Django's function won't do it for us.
    """
    # Default to None instead of {} to avoid a shared mutable default argument.
    encoded = urllib.parse.urlencode(info if info is not None else {})
    django_client = self.client  # see WRAPPER_COMMENT
    self.set_http_host(kwargs)
    return django_client.patch(url, encoded, **kwargs)
|
|
|
|
|
|
|
|
@instrument_url
def client_patch_multipart(self, url: str, info: Optional[Dict[str, Any]]=None, **kwargs: Any) -> HttpResponse:
    """
    Use this for patch requests that have file uploads or
    that need some sort of multi-part content. In the future
    Django's test client may become a bit more flexible,
    so we can hopefully eliminate this. (When you post
    with the Django test client, it deals with MULTIPART_CONTENT
    automatically, but not patch.)
    """
    # Default to None instead of {} to avoid a shared mutable default argument.
    encoded = encode_multipart(BOUNDARY, info if info is not None else {})
    django_client = self.client  # see WRAPPER_COMMENT
    self.set_http_host(kwargs)
    return django_client.patch(
        url,
        encoded,
        content_type=MULTIPART_CONTENT,
        **kwargs)
|
|
|
|
|
|
|
|
@instrument_url
def client_put(self, url: str, info: Optional[Dict[str, Any]]=None, **kwargs: Any) -> HttpResponse:
    """PUT with urlencoded body; see WRAPPER_COMMENT."""
    # Default to None instead of {} to avoid a shared mutable default argument.
    encoded = urllib.parse.urlencode(info if info is not None else {})
    django_client = self.client  # see WRAPPER_COMMENT
    self.set_http_host(kwargs)
    return django_client.put(url, encoded, **kwargs)
|
|
|
|
|
2016-12-21 21:29:29 +01:00
|
|
|
@instrument_url
def client_delete(self, url: str, info: Optional[Dict[str, Any]]=None, **kwargs: Any) -> HttpResponse:
    """DELETE with urlencoded body; see WRAPPER_COMMENT."""
    # Default to None instead of {} to avoid a shared mutable default argument.
    encoded = urllib.parse.urlencode(info if info is not None else {})
    django_client = self.client  # see WRAPPER_COMMENT
    self.set_http_host(kwargs)
    return django_client.delete(url, encoded, **kwargs)
|
|
|
|
|
2017-03-05 09:31:17 +01:00
|
|
|
@instrument_url
def client_options(self, url: str, info: Optional[Dict[str, Any]]=None, **kwargs: Any) -> HttpResponse:
    """OPTIONS with urlencoded body; see WRAPPER_COMMENT."""
    # Default to None instead of {} to avoid a shared mutable default argument.
    encoded = urllib.parse.urlencode(info if info is not None else {})
    django_client = self.client  # see WRAPPER_COMMENT
    self.set_http_host(kwargs)
    return django_client.options(url, encoded, **kwargs)
|
|
|
|
|
2017-08-26 01:24:50 +02:00
|
|
|
@instrument_url
def client_head(self, url: str, info: Optional[Dict[str, Any]]=None, **kwargs: Any) -> HttpResponse:
    """HEAD with urlencoded query; see WRAPPER_COMMENT."""
    # Default to None instead of {} to avoid a shared mutable default argument.
    encoded = urllib.parse.urlencode(info if info is not None else {})
    django_client = self.client  # see WRAPPER_COMMENT
    self.set_http_host(kwargs)
    return django_client.head(url, encoded, **kwargs)
|
|
|
|
|
2016-11-10 19:30:09 +01:00
|
|
|
@instrument_url
def client_post(self, url: str, info: Optional[Dict[str, Any]]=None, **kwargs: Any) -> HttpResponse:
    """POST; Django's test client handles encoding. See WRAPPER_COMMENT."""
    # Default to None instead of {} to avoid a shared mutable default argument.
    if info is None:
        info = {}
    django_client = self.client  # see WRAPPER_COMMENT
    self.set_http_host(kwargs)
    return django_client.post(url, info, **kwargs)
|
|
|
|
|
2016-11-17 16:52:28 +01:00
|
|
|
@instrument_url
def client_post_request(self, url: str, req: Any) -> HttpResponse:
    """
    Simulate hitting an endpoint by resolving the URL manually and
    calling the view function directly. This keeps our URL
    instrumentation working for /notify_tornado and similar
    endpoints that need a hand-crafted request object.
    """
    view_func = resolve(url).func
    return view_func(req)
|
|
|
|
|
2016-11-10 19:30:09 +01:00
|
|
|
@instrument_url
def client_get(self, url: str, info: Optional[Dict[str, Any]]=None, **kwargs: Any) -> HttpResponse:
    """GET; Django's test client handles encoding. See WRAPPER_COMMENT."""
    # Default to None instead of {} to avoid a shared mutable default argument.
    if info is None:
        info = {}
    django_client = self.client  # see WRAPPER_COMMENT
    self.set_http_host(kwargs)
    return django_client.get(url, info, **kwargs)
|
|
|
|
|
2017-05-07 17:21:26 +02:00
|
|
|
# Well-known registered users in the default 'zulip' realm,
# keyed by short name -> email address.
example_user_map = dict(
    hamlet='hamlet@zulip.com',
    cordelia='cordelia@zulip.com',
    iago='iago@zulip.com',
    prospero='prospero@zulip.com',
    othello='othello@zulip.com',
    AARON='AARON@zulip.com',
    aaron='aaron@zulip.com',
    ZOE='ZOE@zulip.com',
    polonius='polonius@zulip.com',
    webhook_bot='webhook-bot@zulip.com',
    welcome_bot='welcome-bot@zulip.com',
    outgoing_webhook_bot='outgoing-webhook@zulip.com',
    default_bot='default-bot@zulip.com'
)

# Users in the 'zephyr' (MIT) realm.
mit_user_map = dict(
    sipbtest="sipbtest@mit.edu",
    starnine="starnine@mit.edu",
    espuser="espuser@mit.edu",
)

# Users in the 'lear' realm.
lear_user_map = dict(
    cordelia="cordelia@zulip.com",
    king="king@lear.org"
)

# Non-registered test users
nonreg_user_map = dict(
    test='test@zulip.com',
    test1='test1@zulip.com',
    alice='alice@zulip.com',
    newuser='newuser@zulip.com',
    bob='bob@zulip.com',
    cordelia='cordelia@zulip.com',
    newguy='newguy@zulip.com',
    me='me@zulip.com',
)

# LDAP uids for the example users, for tests using the LDAP test directory.
example_user_ldap_username_map = dict(
    hamlet='hamlet',
    cordelia='cordelia',
    # aaron's uid in our test directory is "letham".
    aaron='letham',
)
|
|
|
|
|
2017-11-27 05:27:04 +01:00
|
|
|
def nonreg_user(self, name: str) -> UserProfile:
    """Fetch the UserProfile for a non-registered test user by short name."""
    return get_user(self.nonreg_user_map[name], get_realm("zulip"))
|
2017-05-24 02:42:31 +02:00
|
|
|
|
2017-11-27 05:27:04 +01:00
|
|
|
def example_user(self, name: str) -> UserProfile:
    """Fetch the UserProfile for one of the well-known example users."""
    return get_user(self.example_user_map[name], get_realm('zulip'))
|
2017-05-23 01:26:38 +02:00
|
|
|
|
2017-11-27 05:27:04 +01:00
|
|
|
def mit_user(self, name: str) -> UserProfile:
    """Fetch the UserProfile for a user in the 'zephyr' realm."""
    return get_user(self.mit_user_map[name], get_realm('zephyr'))
|
2017-05-07 17:21:26 +02:00
|
|
|
|
2018-09-14 12:49:42 +02:00
|
|
|
def lear_user(self, name: str) -> UserProfile:
    """Fetch the UserProfile for a user in the 'lear' realm."""
    return get_user(self.lear_user_map[name], get_realm('lear'))
|
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
def nonreg_email(self, name: str) -> str:
    """Return the email address of a non-registered test user."""
    email_map = self.nonreg_user_map
    return email_map[name]
|
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
def example_email(self, name: str) -> str:
    """Return the email address of a well-known example user."""
    email_map = self.example_user_map
    return email_map[name]
|
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
def mit_email(self, name: str) -> str:
    """Return the email address of a 'zephyr' realm test user."""
    email_map = self.mit_user_map
    return email_map[name]
|
|
|
|
|
2017-11-27 05:27:04 +01:00
|
|
|
def notification_bot(self) -> UserProfile:
    """Return the system-wide notification bot's UserProfile."""
    bot_email = settings.NOTIFICATION_BOT
    return get_system_bot(bot_email)
|
2017-05-08 17:42:50 +02:00
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
def create_test_bot(self, short_name: str, user_profile: UserProfile,
                    assert_json_error_msg: Optional[str]=None, **extras: Any) -> Optional[UserProfile]:
    """Create a bot owned by user_profile via the /json/bots endpoint.

    If assert_json_error_msg is given, assert the creation fails with
    that error and return None; otherwise assert success and return the
    new bot's UserProfile.
    """
    self.login(user_profile.delivery_email)
    bot_info = {
        'short_name': short_name,
        'full_name': 'Foo Bot',
    }
    # Callers can override/add fields (e.g. bot_type) via **extras.
    bot_info.update(extras)
    result = self.client_post("/json/bots", bot_info)
    if assert_json_error_msg is not None:
        self.assert_json_error(result, assert_json_error_msg)
        return None
    else:
        self.assert_json_success(result)
        bot_email = '{}-bot@zulip.testserver'.format(short_name)
        bot_profile = get_user(bot_email, user_profile.realm)
        return bot_profile
|
2017-10-25 17:17:17 +02:00
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
def login_with_return(self, email: str, password: Optional[str]=None,
                      **kwargs: Any) -> HttpResponse:
    """POST to the login form and return the HttpResponse.

    Unlike login(), this goes through the real /accounts/login/ view,
    so callers can inspect the response (e.g. redirects or errors).
    """
    if password is None:
        # Test users have deterministic passwords derived from their email.
        password = initial_password(email)
    result = self.client_post('/accounts/login/',
                              {'username': email, 'password': password},
                              **kwargs)
    # A 500 here would indicate a server bug, not a login failure.
    self.assertNotEqual(result.status_code, 500)
    return result
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
def login(self, email: str, password: Optional[str]=None, fails: bool=False,
          realm: Optional[Realm]=None) -> HttpResponse:
    """Log the test client in directly via the session backend.

    With fails=True, assert that the login attempt is rejected instead.
    """
    if realm is None:
        realm = get_realm("zulip")
    if password is None:
        # Test users have deterministic passwords derived from their email.
        password = initial_password(email)
    if not fails:
        self.assertTrue(self.client.login(username=email, password=password,
                                          realm=realm))
    else:
        self.assertFalse(self.client.login(username=email, password=password,
                                           realm=realm))
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2017-07-13 13:42:57 +02:00
|
|
|
def login_2fa(self, user_profile: UserProfile) -> None:
    """
    We need this function to call request.session.save().
    do_two_factor_login doesn't save session; in normal request-response
    cycle this doesn't matter because middleware will save the session
    when it finds it dirty; however,in tests we will have to do that
    explicitly.
    """
    # Build a minimal request carrying the test client's session.
    request = HttpRequest()
    request.session = self.client.session
    request.user = user_profile
    do_two_factor_login(request, user_profile)
    # Persist the 2FA state; no middleware runs to do this for us.
    request.session.save()
|
|
|
|
|
2017-11-27 05:27:04 +01:00
|
|
|
def logout(self) -> None:
    """Log the test client out, clearing its session."""
    self.client.logout()
|
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
def register(self, email: str, password: str, **kwargs: Any) -> HttpResponse:
    """Run the full two-step registration flow for a new user."""
    # Step one: request a confirmation email for the address.
    self.client_post('/accounts/home/', {'email': email},
                     **kwargs)
    # Step two: submit the registration form with the confirmation key.
    return self.submit_reg_form_for_user(email, password, **kwargs)
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2017-11-27 05:27:04 +01:00
|
|
|
def submit_reg_form_for_user(
        self, email: str, password: str,
        realm_name: Optional[str]="Zulip Test",
        realm_subdomain: Optional[str]="zuliptest",
        from_confirmation: Optional[str]='', full_name: Optional[str]=None,
        timezone: Optional[str]='', realm_in_root_domain: Optional[str]=None,
        default_stream_groups: Optional[List[str]]=None,
        source_realm: Optional[str]='', **kwargs: Any) -> HttpResponse:
    """
    Stage two of the two-step registration process.

    If things are working correctly the account should be fully
    registered after this call.

    You can pass the HTTP_HOST variable for subdomains via kwargs.
    """
    # Use None (not []) as the default to avoid a shared mutable default.
    if default_stream_groups is None:
        default_stream_groups = []
    if full_name is None:
        full_name = email.replace("@", "_")

    payload = {
        'full_name': full_name,
        'password': password,
        'realm_name': realm_name,
        'realm_subdomain': realm_subdomain,
        'key': find_key_by_email(email),
        'timezone': timezone,
        'terms': True,
        'from_confirmation': from_confirmation,
        'default_stream_group': default_stream_groups,
        'source_realm': source_realm,
    }
    if realm_in_root_domain is not None:
        payload['realm_in_root_domain'] = realm_in_root_domain
    return self.client_post('/accounts/register/', payload, **kwargs)
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
def get_confirmation_url_from_outbox(self, email_address: str, *,
                                     url_pattern: Optional[str]=None) -> str:
    """Find the most recent confirmation URL emailed to email_address.

    Searches the Django test outbox newest-first and returns the first
    regex capture of url_pattern in the matching message's body.
    Raises AssertionError if no matching email (or URL) is found.
    """
    from django.core.mail import outbox
    if url_pattern is None:
        # This is a bit of a crude heuristic, but good enough for most tests.
        url_pattern = settings.EXTERNAL_HOST + r"(\S+)>"
    for message in reversed(outbox):
        if email_address in parseaddr(message.to)[1]:
            match = re.search(url_pattern, message.body)
            # Fail with a clear assertion rather than an AttributeError
            # if the email body doesn't contain the expected URL.
            if match is None:  # nocoverage
                raise AssertionError("Couldn't find a confirmation URL in the email body.")
            return match.groups()[0]
    raise AssertionError("Couldn't find a confirmation email.")
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
def encode_credentials(self, identifier: str, realm: str="zulip") -> str:
    """
    identifier: Can be an email or a remote server uuid.

    Returns an HTTP Basic auth header value for the identifier,
    caching API keys in the module-level API_KEYS dict.
    """
    if identifier in API_KEYS:
        api_key = API_KEYS[identifier]
    else:
        if is_remote_server(identifier):
            # Remote servers authenticate with their own stored API key.
            api_key = get_remote_server_by_uuid(identifier).api_key
        else:
            user = get_user(identifier, get_realm(realm))
            api_key = get_api_key(user)
        # Cache so repeated API calls in a test avoid extra lookups.
        API_KEYS[identifier] = api_key

    credentials = "%s:%s" % (identifier, api_key)
    return 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2019-07-23 11:50:05 +02:00
|
|
|
def api_get(self, identifier: str, *args: Any, **kwargs: Any) -> HttpResponse:
    """GET with HTTP basic auth credentials for the given identifier."""
    subdomain = kwargs.get('subdomain', 'zulip')
    kwargs['HTTP_AUTHORIZATION'] = self.encode_credentials(identifier, subdomain)
    return self.client_get(*args, **kwargs)
|
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
def api_post(self, identifier: str, *args: Any, **kwargs: Any) -> HttpResponse:
    """POST with HTTP basic auth credentials for the given identifier."""
    subdomain = kwargs.get('subdomain', 'zulip')
    kwargs['HTTP_AUTHORIZATION'] = self.encode_credentials(identifier, subdomain)
    return self.client_post(*args, **kwargs)
|
|
|
|
|
2019-07-23 11:50:05 +02:00
|
|
|
def api_patch(self, identifier: str, *args: Any, **kwargs: Any) -> HttpResponse:
    """PATCH with HTTP basic auth credentials for the given identifier."""
    subdomain = kwargs.get('subdomain', 'zulip')
    kwargs['HTTP_AUTHORIZATION'] = self.encode_credentials(identifier, subdomain)
    return self.client_patch(*args, **kwargs)
|
|
|
|
|
2019-07-23 11:50:05 +02:00
|
|
|
def api_delete(self, identifier: str, *args: Any, **kwargs: Any) -> HttpResponse:
    """DELETE with HTTP basic auth credentials for the given identifier."""
    subdomain = kwargs.get('subdomain', 'zulip')
    kwargs['HTTP_AUTHORIZATION'] = self.encode_credentials(identifier, subdomain)
    return self.client_delete(*args, **kwargs)
|
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
def get_streams(self, email: str, realm: Realm) -> List[str]:
    """
    Helper function to get the stream names for a user
    """
    user_profile = get_user(email, realm)
    subs = get_stream_subscriptions_for_user(user_profile).filter(
        active=True,
    )
    # For stream subscriptions, get_display_recipient returns the stream name.
    return [cast(str, get_display_recipient(sub.recipient)) for sub in subs]
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
def send_personal_message(self, from_email: str, to_email: str, content: str="test content",
                          sender_realm: str="zulip",
                          sending_client_name: str="test suite") -> int:
    """Send a 1:1 private message and return the new message's id."""
    sender = get_user(from_email, get_realm(sender_realm))

    recipient_list = [to_email]
    (sending_client, _) = Client.objects.get_or_create(name=sending_client_name)

    return check_send_message(
        sender, sending_client, 'private', recipient_list, None,
        content
    )
|
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
def send_huddle_message(self, from_email: str, to_emails: List[str], content: str="test content",
                        sender_realm: str="zulip",
                        sending_client_name: str="test suite") -> int:
    """Send a group private message (huddle) and return the message id.

    to_emails must contain at least two recipients; otherwise it would
    be a 1:1 message, not a huddle.
    """
    sender = get_user(from_email, get_realm(sender_realm))

    assert(len(to_emails) >= 2)

    (sending_client, _) = Client.objects.get_or_create(name=sending_client_name)

    return check_send_message(
        sender, sending_client, 'private', to_emails, None,
        content
    )
|
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
def send_stream_message(self, sender_email: str, stream_name: str, content: str="test content",
                        topic_name: str="test", sender_realm: str="zulip",
                        recipient_realm: Optional[Realm]=None,
                        sending_client_name: str="test suite") -> int:
    """Send a stream message and return the new message's id.

    recipient_realm allows cross-realm sends; when None the stream is
    looked up in the sender's realm (handled by check_send_stream_message).
    """
    sender = get_user(sender_email, get_realm(sender_realm))

    (sending_client, _) = Client.objects.get_or_create(name=sending_client_name)

    return check_send_stream_message(
        sender=sender,
        client=sending_client,
        stream_name=stream_name,
        topic=topic_name,
        body=content,
        realm=recipient_realm,
    )
|
|
|
|
|
2018-02-14 04:44:41 +01:00
|
|
|
def get_messages_response(self, anchor: int=1, num_before: int=100, num_after: int=100,
                          use_first_unread_anchor: bool=False) -> Dict[str, List[Dict[str, Any]]]:
    """Fetch /json/messages and return the parsed JSON payload."""
    post_params = {"anchor": anchor, "num_before": num_before,
                   "num_after": num_after,
                   "use_first_unread_anchor": ujson.dumps(use_first_unread_anchor)}
    result = self.client_get("/json/messages", dict(post_params))
    return result.json()
|
|
|
|
|
|
|
|
def get_messages(self, anchor: int=1, num_before: int=100, num_after: int=100,
                 use_first_unread_anchor: bool=False) -> List[Dict[str, Any]]:
    """Like get_messages_response, but return only the 'messages' list."""
    response = self.get_messages_response(anchor, num_before, num_after, use_first_unread_anchor)
    return response['messages']
|
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
def users_subscribed_to_stream(self, stream_name: str, realm: Realm) -> List[UserProfile]:
    """Return the UserProfiles with an active subscription to the stream."""
    stream = Stream.objects.get(name=stream_name, realm=realm)
    recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
    active_subs = Subscription.objects.filter(recipient=recipient, active=True)
    return [sub.user_profile for sub in active_subs]
|
|
|
|
|
2017-11-27 05:27:04 +01:00
|
|
|
def assert_url_serves_contents_of_file(self, url: str, result: bytes) -> None:
    """Assert that GETting url streams back exactly the given bytes."""
    response = self.client_get(url)
    # The response is streamed; join the chunks before comparing.
    data = b"".join(response.streaming_content)
    self.assertEqual(result, data)
|
2016-12-19 16:17:19 +01:00
|
|
|
|
2017-11-27 05:27:04 +01:00
|
|
|
def assert_json_success(self, result: HttpResponse) -> Dict[str, Any]:
    """
    Successful POSTs return a 200 and JSON of the form {"result": "success",
    "msg": ""}.
    """
    try:
        json = ujson.loads(result.content)
    except Exception:  # nocoverage
        # Substitute a sentinel payload so the assertions below produce
        # a useful failure message instead of a parse traceback.
        json = {'msg': "Error parsing JSON in response!"}
    self.assertEqual(result.status_code, 200, json['msg'])
    self.assertEqual(json.get("result"), "success")
    # We have a msg key for consistency with errors, but it typically has an
    # empty value.
    self.assertIn("msg", json)
    self.assertNotEqual(json["msg"], "Error parsing JSON in response!")
    return json
|
|
|
|
|
2017-11-27 05:27:04 +01:00
|
|
|
def get_json_error(self, result: HttpResponse, status_code: int=400) -> Dict[str, Any]:
    """Assert result is a JSON error with the given status; return its msg."""
    try:
        json = ujson.loads(result.content)
    except Exception:  # nocoverage
        # Sentinel payload gives a readable failure message below.
        json = {'msg': "Error parsing JSON in response!"}
    self.assertEqual(result.status_code, status_code, msg=json.get('msg'))
    self.assertEqual(json.get("result"), "error")
    return json['msg']
|
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
def assert_json_error(self, result: HttpResponse, msg: str, status_code: int=400) -> None:
    """
    Invalid POSTs return an error status code and JSON of the form
    {"result": "error", "msg": "reason"}.
    """
    error_msg = self.get_json_error(result, status_code=status_code)
    self.assertEqual(error_msg, msg)
|
|
|
|
|
2017-11-27 05:27:04 +01:00
|
|
|
def assert_length(self, items: List[Any], count: int) -> None:
    """Assert len(items) == count, dumping the items on mismatch."""
    actual_count = len(items)
    if actual_count == count:
        return
    # nocoverage: only reached when the test is already failing.
    print('ITEMS:\n')
    for item in items:
        print(item)
    print("\nexpected length: %s\nactual length: %s" % (count, actual_count))
    raise AssertionError('List is unexpected size!')
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
def assert_json_error_contains(self, result: HttpResponse, msg_substring: str,
                               status_code: int=400) -> None:
    """Assert the JSON error message contains the given substring."""
    error_msg = self.get_json_error(result, status_code=status_code)
    self.assertIn(msg_substring, error_msg)
|
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
def assert_in_response(self, substring: str, response: HttpResponse) -> None:
    """Assert the decoded response body contains the given substring."""
    body = response.content.decode('utf-8')
    self.assertIn(substring, body)
|
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
def assert_in_success_response(self, substrings: List[str],
                               response: HttpResponse) -> None:
    """Assert a 200 response whose body contains every given substring."""
    self.assertEqual(response.status_code, 200)
    decoded = response.content.decode('utf-8')
    for fragment in substrings:
        self.assertIn(fragment, decoded)
|
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
def assert_not_in_success_response(self, substrings: List[str],
                                   response: HttpResponse) -> None:
    """Assert a 200 response whose body contains none of the substrings."""
    self.assertEqual(response.status_code, 200)
    decoded = response.content.decode('utf-8')
    for fragment in substrings:
        self.assertNotIn(fragment, decoded)
|
|
|
|
|
2019-05-26 22:12:46 +02:00
|
|
|
def assert_logged_in_user_id(self, user_id: Optional[int]) -> None:
|
|
|
|
"""
|
|
|
|
Verifies the user currently logged in for the test client has the provided user_id.
|
|
|
|
Pass None to verify no user is logged in.
|
|
|
|
"""
|
|
|
|
self.assertEqual(get_session_dict_user(self.client.session), user_id)
|
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
def webhook_fixture_data(self, type: str, action: str, file_type: str='json') -> str:
|
2017-11-04 18:03:59 +01:00
|
|
|
fn = os.path.join(
|
|
|
|
os.path.dirname(__file__),
|
|
|
|
"../webhooks/%s/fixtures/%s.%s" % (type, action, file_type)
|
|
|
|
)
|
|
|
|
return open(fn).read()
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2019-03-26 12:46:54 +01:00
|
|
|
def fixture_file_name(self, file_name: str, type: str='') -> str:
|
|
|
|
return os.path.join(
|
2018-04-20 03:57:21 +02:00
|
|
|
os.path.dirname(__file__),
|
|
|
|
"../tests/fixtures/%s/%s" % (type, file_name)
|
|
|
|
)
|
2019-03-26 12:46:54 +01:00
|
|
|
|
|
|
|
def fixture_data(self, file_name: str, type: str='') -> str:
|
|
|
|
fn = self.fixture_file_name(file_name, type)
|
2018-04-20 03:57:21 +02:00
|
|
|
return open(fn).read()
|
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
    def make_stream(self, stream_name: str, realm: Optional[Realm]=None,
                    invite_only: Optional[bool]=False,
                    history_public_to_subscribers: Optional[bool]=None) -> Stream:
        """Create a Stream row (plus its Recipient) directly in the database.

        Unlike `subscribe`, this does not subscribe anyone.  `realm` defaults
        to the class's DEFAULT_REALM; `history_public_to_subscribers=None`
        means "use the realm/invite_only-derived default".  Raises a plain
        Exception (via IntegrityError) if the stream name is already in use.
        """
        if realm is None:
            realm = self.DEFAULT_REALM

        # Resolve None to the realm-appropriate default before creating the row.
        history_public_to_subscribers = get_default_value_for_history_public_to_subscribers(
            realm, invite_only, history_public_to_subscribers)

        try:
            stream = Stream.objects.create(
                realm=realm,
                name=stream_name,
                invite_only=invite_only,
                history_public_to_subscribers=history_public_to_subscribers,
            )
        except IntegrityError:  # nocoverage -- this is for bugs in the tests
            # A duplicate stream name means the test itself is buggy, so fail
            # loudly with an actionable message rather than a DB traceback.
            raise Exception('''
                %s already exists

                Please call make_stream with a stream name
                that is not already in use.''' % (stream_name,))

        # Streams carry a denormalized pointer to their Recipient row; create
        # and attach it so message-sending code paths work on this stream.
        recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
        stream.recipient = recipient
        stream.save(update_fields=["recipient"])
        return stream
|
|
|
|
|
2019-09-18 15:04:17 +02:00
|
|
|
INVALID_STREAM_ID = 999999
|
2019-10-22 07:14:46 +02:00
|
|
|
|
2019-09-18 15:04:17 +02:00
|
|
|
def get_stream_id(self, name: str, realm: Optional[Realm]=None) -> int:
|
|
|
|
if not realm:
|
|
|
|
realm = get_realm('zulip')
|
|
|
|
try:
|
|
|
|
stream = get_realm_stream(name, realm.id)
|
|
|
|
except Stream.DoesNotExist:
|
|
|
|
return self.INVALID_STREAM_ID
|
|
|
|
return stream.id
|
|
|
|
|
2017-08-25 06:01:29 +02:00
|
|
|
# Subscribe to a stream directly
|
2018-05-11 01:40:45 +02:00
|
|
|
def subscribe(self, user_profile: UserProfile, stream_name: str) -> Stream:
|
2017-08-25 06:01:29 +02:00
|
|
|
try:
|
|
|
|
stream = get_stream(stream_name, user_profile.realm)
|
|
|
|
from_stream_creation = False
|
|
|
|
except Stream.DoesNotExist:
|
|
|
|
stream, from_stream_creation = create_stream_if_needed(user_profile.realm, stream_name)
|
|
|
|
bulk_add_subscriptions([stream], [user_profile], from_stream_creation=from_stream_creation)
|
|
|
|
return stream
|
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
def unsubscribe(self, user_profile: UserProfile, stream_name: str) -> None:
|
2018-03-14 00:13:21 +01:00
|
|
|
client = get_client("website")
|
2017-08-25 06:23:11 +02:00
|
|
|
stream = get_stream(stream_name, user_profile.realm)
|
2018-03-14 00:13:21 +01:00
|
|
|
bulk_remove_subscriptions([user_profile], [stream], client)
|
2016-11-10 19:30:09 +01:00
|
|
|
|
|
|
|
# Subscribe to a stream by making an API request
|
2018-05-11 01:40:45 +02:00
|
|
|
def common_subscribe_to_streams(self, email: str, streams: Iterable[str],
|
2017-12-05 18:41:26 +01:00
|
|
|
extra_post_data: Dict[str, Any]={}, invite_only: bool=False,
|
|
|
|
**kwargs: Any) -> HttpResponse:
|
2016-11-10 19:30:09 +01:00
|
|
|
post_data = {'subscriptions': ujson.dumps([{"name": stream} for stream in streams]),
|
|
|
|
'invite_only': ujson.dumps(invite_only)}
|
|
|
|
post_data.update(extra_post_data)
|
2017-12-14 19:02:02 +01:00
|
|
|
result = self.api_post(email, "/api/v1/users/me/subscriptions", post_data, **kwargs)
|
2016-11-10 19:30:09 +01:00
|
|
|
return result
|
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
def check_user_subscribed_only_to_streams(self, user_name: str,
|
2017-11-27 05:27:04 +01:00
|
|
|
streams: List[Stream]) -> None:
|
2017-11-16 22:12:31 +01:00
|
|
|
streams = sorted(streams, key=lambda x: x.name)
|
|
|
|
subscribed_streams = gather_subscriptions(self.nonreg_user(user_name))[0]
|
|
|
|
|
|
|
|
self.assertEqual(len(subscribed_streams), len(streams))
|
|
|
|
|
|
|
|
for x, y in zip(subscribed_streams, streams):
|
|
|
|
self.assertEqual(x["name"], y.name)
|
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
    def send_json_payload(self, user_profile: UserProfile, url: str,
                          payload: Union[str, Dict[str, Any]],
                          stream_name: Optional[str]=None, **post_params: Any) -> Message:
        """POST `payload` to `url` and return the Message the endpoint sent.

        If `stream_name` is given, the user is subscribed to it first and the
        resulting message's recipient is checked against it.  Fails if the
        endpoint returned an error, or if it claimed success without actually
        sending a new message (a common failure mode for webhook endpoints
        whose exception handling still returns 200).
        """
        if stream_name is not None:
            self.subscribe(user_profile, stream_name)

        # Snapshot the latest message so we can detect whether the endpoint
        # actually produced a new one.
        prior_msg = self.get_last_message()

        result = self.client_post(url, payload, **post_params)
        self.assert_json_success(result)

        # Check the correct message was sent
        msg = self.get_last_message()

        if msg.id == prior_msg.id:
            raise Exception('''
                Your test code called an endpoint that did
                not write any new messages. It is probably
                broken (but still returns 200 due to exception
                handling).
                ''')  # nocoverage

        self.assertEqual(msg.sender.email, user_profile.email)
        if stream_name is not None:
            self.assertEqual(get_display_recipient(msg.recipient), stream_name)
        # TODO: should also validate recipient for private messages

        return msg
|
|
|
|
|
2017-11-27 05:27:04 +01:00
|
|
|
    def get_last_message(self) -> Message:
        """Return the most recently sent message (highest id) in the test DB."""
        return Message.objects.latest('id')
|
|
|
|
|
2017-11-27 05:27:04 +01:00
|
|
|
def get_second_to_last_message(self) -> Message:
|
2016-11-10 19:30:09 +01:00
|
|
|
return Message.objects.all().order_by('-id')[1]
|
|
|
|
|
|
|
|
    @contextmanager
    def simulated_markdown_failure(self) -> Iterator[None]:
        '''
        This raises a failure inside of the try/except block of
        bugdown.__init__.do_convert.
        '''
        # ERROR_BOT=None prevents the failure notification path from needing
        # a real bot; patching bugdown.timeout makes rendering raise, and
        # patching bugdown_logger keeps the expected error log quiet.
        with \
                self.settings(ERROR_BOT=None), \
                mock.patch('zerver.lib.bugdown.timeout', side_effect=KeyError('foo')), \
                mock.patch('zerver.lib.bugdown.bugdown_logger'):
            yield
|
|
|
|
|
2017-07-13 13:42:57 +02:00
|
|
|
def create_default_device(self, user_profile: UserProfile,
|
|
|
|
number: str="+12223334444") -> None:
|
|
|
|
phone_device = PhoneDevice(user=user_profile, name='default',
|
|
|
|
confirmed=True, number=number,
|
|
|
|
key='abcd', method='sms')
|
|
|
|
phone_device.save()
|
|
|
|
|
2019-04-04 12:03:54 +02:00
|
|
|
def rm_tree(self, path: str) -> None:
|
|
|
|
if os.path.exists(path):
|
|
|
|
shutil.rmtree(path)
|
|
|
|
|
2019-04-04 12:05:54 +02:00
|
|
|
def make_import_output_dir(self, exported_from: str) -> str:
|
2019-07-06 02:07:56 +02:00
|
|
|
output_dir = tempfile.mkdtemp(dir=settings.TEST_WORKER_DIR,
|
2019-05-03 22:16:07 +02:00
|
|
|
prefix="test-" + exported_from + "-import-")
|
2019-04-04 12:05:54 +02:00
|
|
|
os.makedirs(output_dir, exist_ok=True)
|
|
|
|
return output_dir
|
|
|
|
|
2019-05-21 12:21:32 +02:00
|
|
|
def get_set(self, data: List[Dict[str, Any]], field: str) -> Set[str]:
|
|
|
|
values = set(r[field] for r in data)
|
|
|
|
return values
|
|
|
|
|
2019-05-21 12:29:09 +02:00
|
|
|
def find_by_id(self, data: List[Dict[str, Any]], db_id: int) -> Dict[str, Any]:
|
|
|
|
return [
|
|
|
|
r for r in data
|
|
|
|
if r['id'] == db_id][0]
|
|
|
|
|
2019-10-16 18:01:38 +02:00
|
|
|
    def init_default_ldap_database(self) -> None:
        """
        Takes care of the mock_ldap setup, loads
        a directory from zerver/tests/fixtures/ldap/directory.json with various entries
        to be used by tests.
        If a test wants to specify its own directory, it can just replace
        self.mock_ldap.directory with its own content, but in most cases it should be
        enough to use change_user_attr to make simple modifications to the pre-loaded
        directory. If new user entries are needed to test for some additional unusual
        scenario, it's most likely best to add that to directory.json.
        """
        directory = ujson.loads(self.fixture_data("directory.json", type="ldap"))

        # Load binary attributes. If in "directory", an attribute as its value
        # has a string starting with "file:", the rest of the string is assumed
        # to be a path to the file from which binary data should be loaded,
        # as the actual value of the attribute in ldap.
        for dn, attrs in directory.items():
            for attr, value in attrs.items():
                if isinstance(value, str) and value.startswith("file:"):
                    # LDAP attribute values are lists, hence the one-element list.
                    with open(value[5:], 'rb') as f:
                        attrs[attr] = [f.read(), ]

        # Patch django-auth-ldap's connection factory so that every LDAP
        # "connection" in tests is actually our in-memory MockLDAP directory.
        ldap_patcher = mock.patch('django_auth_ldap.config.ldap.initialize')
        self.mock_initialize = ldap_patcher.start()
        self.mock_ldap = MockLDAP(directory)
        self.mock_initialize.return_value = self.mock_ldap
|
|
|
|
|
|
|
|
    def change_ldap_user_attr(self, username: str, attr_name: str, attr_value: Union[str, bytes],
                              binary: bool=False) -> None:
        """
        Method for changing the value of an attribute of a user entry in the mock
        directory. Use option binary=True if you want binary data to be loaded
        into the attribute from a file specified at attr_value. This changes
        the attribute only for the specific test function that calls this method,
        and is isolated from other tests.
        """
        # DN format matches the entries in zerver/tests/fixtures/ldap/directory.json.
        dn = "uid={username},ou=users,dc=zulip,dc=com".format(username=username)
        if binary:
            with open(attr_value, "rb") as f:
                # attr_value should be a path to the file with the binary data
                data = f.read()  # type: Union[str, bytes]
        else:
            data = attr_value

        # LDAP attribute values are lists, hence the one-element list.
        self.mock_ldap.directory[dn][attr_name] = [data, ]
|
|
|
|
|
2019-10-18 18:25:51 +02:00
|
|
|
def ldap_username(self, username: str) -> str:
|
|
|
|
"""
|
|
|
|
Maps zulip username to the name of the corresponding ldap user
|
|
|
|
in our test directory at zerver/tests/fixtures/ldap/directory.json,
|
|
|
|
if the ldap user exists.
|
|
|
|
"""
|
|
|
|
return self.example_user_ldap_username_map[username]
|
|
|
|
|
|
|
|
def ldap_password(self) -> str:
|
|
|
|
# Currently all ldap users have password "testing"
|
|
|
|
return "testing"
|
|
|
|
|
2016-11-10 19:30:09 +01:00
|
|
|
class WebhookTestCase(ZulipTestCase):
    """
    Shared base class for all webhook integration tests.

    Override the class attributes below and call send_and_test_stream_message
    from your tests.  If your URL is built in an uncommon way, override
    build_webhook_url; if you need to modify the request body (or build it
    without using a fixture), override get_body.
    """
    # Stream the webhook posts to; subclasses must set this.
    STREAM_NAME = None  # type: Optional[str]
    TEST_USER_EMAIL = 'webhook-bot@zulip.com'
    # Format string for the webhook endpoint URL; may contain {api_key}/{stream}.
    URL_TEMPLATE = None  # type: Optional[str]
    # Directory under zerver/webhooks/ holding this integration's fixtures.
    FIXTURE_DIR_NAME = None  # type: Optional[str]

    @property
    def test_user(self) -> UserProfile:
        """The bot user (in the 'zulip' realm) that sends the webhook requests."""
        return get_user(self.TEST_USER_EMAIL, get_realm("zulip"))

    def setUp(self) -> None:
        super().setUp()
        # Build the endpoint URL once so individual tests can just use self.url.
        self.url = self.build_webhook_url()

    def api_stream_message(self, email: str, *args: Any, **kwargs: Any) -> HttpResponse:
        """Like send_and_test_stream_message, but authenticating via the
        HTTP Authorization header for `email` instead of a URL api_key."""
        kwargs['HTTP_AUTHORIZATION'] = self.encode_credentials(email)
        return self.send_and_test_stream_message(*args, **kwargs)

    def send_and_test_stream_message(self, fixture_name: str, expected_topic: Optional[str]=None,
                                     expected_message: Optional[str]=None,
                                     content_type: Optional[str]="application/json",
                                     **kwargs: Any) -> Message:
        """POST the named fixture to the webhook URL and verify the stream
        message it produces (topic and content checks are skipped when the
        corresponding expected_* argument is None).  Returns the Message."""
        payload = self.get_body(fixture_name)
        if content_type is not None:
            kwargs['content_type'] = content_type
        # Fixtures can ship custom HTTP headers; normalize them into the
        # test-client kwarg format before sending.
        headers = get_fixture_http_headers(self.FIXTURE_DIR_NAME, fixture_name)
        headers = standardize_headers(headers)
        kwargs.update(headers)
        msg = self.send_json_payload(self.test_user, self.url, payload,
                                     self.STREAM_NAME, **kwargs)
        self.do_test_topic(msg, expected_topic)
        self.do_test_message(msg, expected_message)

        return msg

    def send_and_test_private_message(self, fixture_name: str, expected_topic: Optional[str]=None,
                                      expected_message: Optional[str]=None,
                                      content_type: Optional[str]="application/json",
                                      **kwargs: Any) -> Message:
        """Like send_and_test_stream_message, but for webhooks that send
        private messages (stream_name=None); the sender can be overridden
        via a `sender` kwarg.  Note: expected_topic is accepted but unused
        here, mirroring the stream variant's signature."""
        payload = self.get_body(fixture_name)
        if content_type is not None:
            kwargs['content_type'] = content_type
        headers = get_fixture_http_headers(self.FIXTURE_DIR_NAME, fixture_name)
        headers = standardize_headers(headers)
        kwargs.update(headers)
        sender = kwargs.get('sender', self.test_user)
        msg = self.send_json_payload(sender, self.url, payload,
                                     stream_name=None, **kwargs)
        self.do_test_message(msg, expected_message)

        return msg

    def build_webhook_url(self, *args: Any, **kwargs: Any) -> str:
        """Render URL_TEMPLATE into a concrete endpoint URL, filling in the
        bot's api_key (when the template references it) and STREAM_NAME, then
        appending args/kwargs as query parameters."""
        url = self.URL_TEMPLATE
        if url.find("api_key") >= 0:
            api_key = get_api_key(self.test_user)
            url = self.URL_TEMPLATE.format(api_key=api_key,
                                           stream=self.STREAM_NAME)
        else:
            url = self.URL_TEMPLATE.format(stream=self.STREAM_NAME)

        has_arguments = kwargs or args
        if has_arguments and url.find('?') == -1:
            url = "{}?".format(url)  # nocoverage
        else:
            # NOTE(review): when no args/kwargs are passed this still appends
            # a '&' that is never trimmed below (the [:-1] only runs when
            # has_arguments is truthy) — looks like URL_TEMPLATEs are expected
            # to already end in '?' in that case; confirm intended.
            url = "{}&".format(url)

        for key, value in kwargs.items():
            url = "{}{}={}&".format(url, key, value)

        for arg in args:
            url = "{}{}&".format(url, arg)

        # Trim the trailing '&' left by the parameter loops.
        return url[:-1] if has_arguments else url

    def get_body(self, fixture_name: str) -> Union[str, Dict[str, str]]:
        """Can be implemented either as returning a dictionary containing the
        post parameters or as string containing the body of the request."""
        return ujson.dumps(ujson.loads(self.webhook_fixture_data(self.FIXTURE_DIR_NAME, fixture_name)))

    def do_test_topic(self, msg: Message, expected_topic: Optional[str]) -> None:
        """Assert the message's topic matches, unless no expectation was given."""
        if expected_topic is not None:
            self.assertEqual(msg.topic_name(), expected_topic)

    def do_test_message(self, msg: Message, expected_message: Optional[str]) -> None:
        """Assert the message's content matches, unless no expectation was given."""
        if expected_message is not None:
            self.assertEqual(msg.content, expected_message)
|
2018-04-09 18:19:55 +02:00
|
|
|
|
2019-05-13 07:04:31 +02:00
|
|
|
class MigrationsTestCase(ZulipTestCase):  # nocoverage
    """
    Test class for database migrations inspired by this blog post:
       https://www.caktusgroup.com/blog/2016/02/02/writing-unit-tests-django-migrations/
    Documented at https://zulip.readthedocs.io/en/latest/subsystems/schema-migrations.html

    Subclasses set migrate_from/migrate_to; setUp rolls the database back to
    migrate_from, calls setUpBeforeMigration with that historical app state,
    then applies migrate_to, leaving the post-migration state in self.apps.
    """
    @property
    def app(self) -> str:
        """The Django app label containing the subclass (and its migrations)."""
        return apps.get_containing_app_config(type(self).__module__).name

    # Migration names to roll back to / forward to; subclasses must set both.
    migrate_from = None  # type: Optional[str]
    migrate_to = None  # type: Optional[str]

    def setUp(self) -> None:
        assert self.migrate_from and self.migrate_to, \
            "TestCase '{}' must define migrate_from and migrate_to properties".format(type(self).__name__)
        migrate_from = [(self.app, self.migrate_from)]  # type: List[Tuple[str, str]]
        migrate_to = [(self.app, self.migrate_to)]  # type: List[Tuple[str, str]]
        executor = MigrationExecutor(connection)
        # Historical model registry as of migrate_from, for use in setUpBeforeMigration.
        old_apps = executor.loader.project_state(migrate_from).apps

        # Reverse to the original migration
        executor.migrate(migrate_from)

        # Hook for subclasses to create data against the pre-migration schema.
        self.setUpBeforeMigration(old_apps)

        # Run the migration to test
        executor = MigrationExecutor(connection)
        executor.loader.build_graph()  # reload.
        executor.migrate(migrate_to)

        # Post-migration model registry for assertions in the test body.
        self.apps = executor.loader.project_state(migrate_to).apps

    def setUpBeforeMigration(self, apps: StateApps) -> None:
        """Override to populate data using the pre-migration `apps` registry."""
        pass  # nocoverage
|