from __future__ import absolute_import
from contextlib import contextmanager
from typing import cast, Any, Callable, Generator, Iterable, Tuple, Sized, Union, Optional, Dict, List, Set

from django.test import TestCase
from django.template import loader
from django.http import HttpResponse
from django.utils.translation import ugettext as _

from zerver.lib.initial_password import initial_password
from zerver.lib.db import TimeTrackingCursor
from zerver.lib.handlers import allocate_handler_id
from zerver.lib.str_utils import force_text
from zerver.lib import cache
from zerver.lib import event_queue
from zerver.worker import queue_processors

from zerver.lib.actions import (
    check_send_message, create_stream_if_needed, do_add_subscription,
    get_display_recipient,
)

from zerver.models import (
    get_realm,
    get_stream,
    get_user_profile_by_email,
    resolve_email_to_domain,
    Client,
    Message,
    Realm,
    Recipient,
    Stream,
    Subscription,
    UserMessage,
    UserProfile,
)

from zerver.lib.request import JsonableError

import base64
import os
import re
import time
import ujson

from six.moves import urllib
from six import text_type
import six

API_KEYS = {}  # type: Dict[str, str]

@contextmanager
def stub(obj, name, f):
    # type: (Any, str, Callable[..., Any]) -> Generator[None, None, None]
    old_f = getattr(obj, name)
    setattr(obj, name, f)
    yield
    setattr(obj, name, old_f)
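
# Usage sketch (hypothetical, for illustration only): temporarily replace an
# attribute for the duration of a `with` block.
#
#     with stub(some_module, 'some_function', lambda *args: None):
#         ...  # some_module.some_function is stubbed out here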

@contextmanager
def simulated_queue_client(client):
    # type: (Any) -> Generator[None, None, None]
    real_SimpleQueueClient = queue_processors.SimpleQueueClient
    queue_processors.SimpleQueueClient = client  # type: ignore # https://github.com/JukkaL/mypy/issues/1152
    yield
    queue_processors.SimpleQueueClient = real_SimpleQueueClient  # type: ignore # https://github.com/JukkaL/mypy/issues/1152
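
# Usage sketch (hypothetical): substitute a fake queue client class so worker
# code can be exercised without a real RabbitMQ connection.
#
#     with simulated_queue_client(FakeQueueClient):
#         ...  # queue_processors.SimpleQueueClient() now builds a FakeQueueClient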

@contextmanager
def tornado_redirected_to_list(lst):
    # type: (List) -> Generator[None, None, None]
    real_event_queue_process_notification = event_queue.process_notification
    event_queue.process_notification = lst.append
    yield
    event_queue.process_notification = real_event_queue_process_notification
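
# Usage sketch (hypothetical): capture the notifications that would have been
# forwarded to Tornado, instead of delivering them.
#
#     events = []  # type: List[Any]
#     with tornado_redirected_to_list(events):
#         ...  # run code that would normally call event_queue.process_notification
#     # `events` now holds the captured notification payloads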

@contextmanager
def simulated_empty_cache():
    # type: () -> Generator[List[Tuple[str, Union[text_type, List[text_type]], text_type]], None, None]
    cache_queries = []  # type: List[Tuple[str, Union[text_type, List[text_type]], text_type]]

    def my_cache_get(key, cache_name=None):
        # type: (text_type, Optional[str]) -> Any
        cache_queries.append(('get', key, cache_name))
        return None

    def my_cache_get_many(keys, cache_name=None):
        # type: (List[text_type], Optional[str]) -> Dict[text_type, Any]
        cache_queries.append(('getmany', keys, cache_name))
        return None

    old_get = cache.cache_get
    old_get_many = cache.cache_get_many
    cache.cache_get = my_cache_get
    cache.cache_get_many = my_cache_get_many
    yield cache_queries
    cache.cache_get = old_get
    cache.cache_get_many = old_get_many
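
# Usage sketch (hypothetical): verify which cache keys a code path touches.
# Inside the block every cache read misses, and each lookup is recorded.
#
#     with simulated_empty_cache() as cache_queries:
#         ...  # exercise the code under test
#     self.assert_length(cache_queries, 2)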

@contextmanager
def queries_captured():
    # type: () -> Generator[List[Dict[str, str]], None, None]
    '''
    Allow a user to capture just the SQL queries executed during
    the with statement.
    '''

    queries = []  # type: List[Dict[str, str]]

    def wrapper_execute(self, action, sql, params=()):
        # type: (TimeTrackingCursor, Callable, str, Iterable[Any]) -> None
        start = time.time()
        try:
            return action(sql, params)
        finally:
            stop = time.time()
            duration = stop - start
            queries.append({
                'sql': self.mogrify(sql, params),
                'time': "%.3f" % duration,
            })

    old_execute = TimeTrackingCursor.execute
    old_executemany = TimeTrackingCursor.executemany

    def cursor_execute(self, sql, params=()):
        # type: (TimeTrackingCursor, str, Iterable[Any]) -> None
        return wrapper_execute(self, super(TimeTrackingCursor, self).execute, sql, params)  # type: ignore # https://github.com/JukkaL/mypy/issues/1167
    TimeTrackingCursor.execute = cursor_execute  # type: ignore # https://github.com/JukkaL/mypy/issues/1167

    def cursor_executemany(self, sql, params=()):
        # type: (TimeTrackingCursor, str, Iterable[Any]) -> None
        return wrapper_execute(self, super(TimeTrackingCursor, self).executemany, sql, params)  # type: ignore # https://github.com/JukkaL/mypy/issues/1167
    TimeTrackingCursor.executemany = cursor_executemany  # type: ignore # https://github.com/JukkaL/mypy/issues/1167

    yield queries

    TimeTrackingCursor.execute = old_execute  # type: ignore # https://github.com/JukkaL/mypy/issues/1167
    TimeTrackingCursor.executemany = old_executemany  # type: ignore # https://github.com/JukkaL/mypy/issues/1167
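
# Usage sketch (hypothetical): assert how many SQL queries a code path issues.
#
#     with queries_captured() as queries:
#         ...  # exercise the code under test
#     self.assert_length(queries, 5)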

def find_key_by_email(address):
    # type: (text_type) -> text_type
    from django.core.mail import outbox
    key_regex = re.compile("accounts/do_confirm/([a-f0-9]{40})>")
    for message in reversed(outbox):
        if address in message.to:
            return key_regex.search(message.body).groups()[0]
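
# Note: find_key_by_email() digs the most recent confirmation key for `address`
# out of Django's test outbox; submit_reg_form_for_user() below relies on it to
# complete the registration flow.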

def message_ids(result):
    # type: (Dict[str, Any]) -> Set[int]
    return set(message['id'] for message in result['messages'])

def message_stream_count(user_profile):
    # type: (UserProfile) -> int
    return UserMessage.objects. \
        select_related("message"). \
        filter(user_profile=user_profile). \
        count()

def most_recent_usermessage(user_profile):
    # type: (UserProfile) -> UserMessage
    query = UserMessage.objects. \
        select_related("message"). \
        filter(user_profile=user_profile). \
        order_by('-message')
    return query[0]  # Django does LIMIT here

def most_recent_message(user_profile):
    # type: (UserProfile) -> Message
    usermessage = most_recent_usermessage(user_profile)
    return usermessage.message

def get_user_messages(user_profile):
    # type: (UserProfile) -> List[Message]
    query = UserMessage.objects. \
        select_related("message"). \
        filter(user_profile=user_profile). \
        order_by('message')
    return [um.message for um in query]

class DummyObject(object):
    pass

class DummyTornadoRequest(object):
    def __init__(self):
        # type: () -> None
        self.connection = DummyObject()
        self.connection.stream = DummyStream()  # type: ignore # monkey-patching here

class DummyHandler(object):
    def __init__(self, assert_callback):
        # type: (Any) -> None
        self.assert_callback = assert_callback
        self.request = DummyTornadoRequest()
        allocate_handler_id(self)

    # Mocks RequestHandler.async_callback, which wraps a callback to
    # handle exceptions. We return the callback as-is.
    def async_callback(self, cb):
        # type: (Callable) -> Callable
        return cb

    def write(self, response):
        # type: (str) -> None
        raise NotImplementedError()

    def zulip_finish(self, response, *ignore):
        # type: (HttpResponse, *Any) -> None
        if self.assert_callback:
            self.assert_callback(response)

class DummySession(object):
    session_key = "0"

class DummyStream(object):
    def closed(self):
        # type: () -> bool
        return False

class POSTRequestMock(object):
    method = "POST"

    def __init__(self, post_data, user_profile, assert_callback=None):
        # type: (Dict[str, Any], UserProfile, Optional[Callable]) -> None
        self.REQUEST = self.POST = post_data
        self.user = user_profile
        self._tornado_handler = DummyHandler(assert_callback)
        self.session = DummySession()
        self._log_data = {}  # type: Dict[str, Any]
        self.META = {'PATH_INFO': 'test'}

class AuthedTestCase(TestCase):
    # Helper because self.client.patch annoyingly requires you to urlencode
    # the request body yourself.
    def client_patch(self, url, info={}, **kwargs):
        # type: (str, Dict[str, Any], **Any) -> HttpResponse
        encoded = urllib.parse.urlencode(info)
        return self.client.patch(url, encoded, **kwargs)

    def client_put(self, url, info={}, **kwargs):
        # type: (str, Dict[str, Any], **Any) -> HttpResponse
        encoded = urllib.parse.urlencode(info)
        return self.client.put(url, encoded, **kwargs)

    def client_delete(self, url, info={}, **kwargs):
        # type: (str, Dict[str, Any], **Any) -> HttpResponse
        encoded = urllib.parse.urlencode(info)
        return self.client.delete(url, encoded, **kwargs)

    def login_with_return(self, email, password=None):
        # type: (text_type, Optional[text_type]) -> HttpResponse
        if password is None:
            password = initial_password(email)
        return self.client.post('/accounts/login/',
                                {'username': email, 'password': password})

    def login(self, email, password=None, fails=False):
        # type: (text_type, Optional[text_type], bool) -> None
        if password is None:
            password = initial_password(email)
        if not fails:
            self.assertTrue(self.client.login(username=email, password=password))
        else:
            self.assertFalse(self.client.login(username=email, password=password))

    def register(self, username, password, domain="zulip.com"):
        # type: (text_type, text_type, text_type) -> HttpResponse
        self.client.post('/accounts/home/',
                         {'email': username + "@" + domain})
        return self.submit_reg_form_for_user(username, password, domain=domain)

    def submit_reg_form_for_user(self, username, password, domain="zulip.com"):
        # type: (text_type, text_type, text_type) -> HttpResponse
        """
        Stage two of the two-step registration process.

        If things are working correctly the account should be fully
        registered after this call.
        """
        return self.client.post('/accounts/register/',
                                {'full_name': username, 'password': password,
                                 'key': find_key_by_email(username + '@' + domain),
                                 'terms': True})

    def get_api_key(self, email):
        # type: (str) -> str
        if email not in API_KEYS:
            API_KEYS[email] = get_user_profile_by_email(email).api_key
        return API_KEYS[email]

    def api_auth(self, email):
        # type: (str) -> Dict[str, str]
        credentials = "%s:%s" % (email, self.get_api_key(email))
        return {
            'HTTP_AUTHORIZATION': 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
        }
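
    # Usage sketch (hypothetical): pass the returned headers to the Django test
    # client to authenticate an API request.
    #
    #     result = self.client.post("/api/v1/some/endpoint", post_data,
    #                               **self.api_auth(email))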

    def get_streams(self, email):
        # type: (text_type) -> List[text_type]
        """
        Helper function to get the stream names for a user
        """
        user_profile = get_user_profile_by_email(email)
        subs = Subscription.objects.filter(
            user_profile=user_profile,
            active=True,
            recipient__type=Recipient.STREAM)
        return [cast(text_type, get_display_recipient(sub.recipient)) for sub in subs]

    def send_message(self, sender_name, raw_recipients, message_type,
                     content=u"test content", subject=u"test", **kwargs):
        # type: (str, Union[text_type, List[text_type]], int, text_type, text_type, **Any) -> int
        sender = get_user_profile_by_email(sender_name)
        if message_type == Recipient.PERSONAL:
            message_type_name = "private"
        else:
            message_type_name = "stream"
        if isinstance(raw_recipients, six.string_types):
            recipient_list = [raw_recipients]
        else:
            recipient_list = raw_recipients
        (sending_client, _) = Client.objects.get_or_create(name="test suite")

        return check_send_message(
            sender, sending_client, message_type_name, recipient_list, subject,
            content, forged=False, forged_timestamp=None,
            forwarder_user_profile=sender, realm=sender.realm, **kwargs)
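
    # Usage sketch (hypothetical): send a stream message and a private message
    # from inside a test.
    #
    #     self.send_message(sender_email, "some stream", Recipient.STREAM,
    #                       content=u"hello")
    #     self.send_message(sender_email, [recipient_email], Recipient.PERSONAL)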

    def get_old_messages(self, anchor=1, num_before=100, num_after=100):
        # type: (int, int, int) -> List[Dict[str, Any]]
        post_params = {"anchor": anchor, "num_before": num_before,
                       "num_after": num_after}
        result = self.client.get("/json/messages", dict(post_params))
        data = ujson.loads(result.content)
        return data['messages']

    def users_subscribed_to_stream(self, stream_name, realm_domain):
        # type: (text_type, text_type) -> List[UserProfile]
        realm = get_realm(realm_domain)
        stream = Stream.objects.get(name=stream_name, realm=realm)
        recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
        subscriptions = Subscription.objects.filter(recipient=recipient, active=True)

        return [subscription.user_profile for subscription in subscriptions]

    def assert_json_success(self, result):
        # type: (HttpResponse) -> Dict[str, Any]
        """
        Successful POSTs return a 200 and JSON of the form {"result": "success",
        "msg": ""}.
        """
        self.assertEqual(result.status_code, 200, result)
        json = ujson.loads(result.content)
        self.assertEqual(json.get("result"), "success")
        # We have a msg key for consistency with errors, but it typically has an
        # empty value.
        self.assertIn("msg", json)
        return json
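
    # Usage sketch (hypothetical): typical success-path assertion.
    #
    #     result = self.client.post("/json/some_endpoint", params)
    #     json = self.assert_json_success(result)
    #     self.assertIn("some_key", json)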

    def get_json_error(self, result, status_code=400):
        # type: (HttpResponse, int) -> text_type
        self.assertEqual(result.status_code, status_code)
        json = ujson.loads(result.content)
        self.assertEqual(json.get("result"), "error")
        return json['msg']

    def assert_json_error(self, result, msg, status_code=400):
        # type: (HttpResponse, str, int) -> None
        """
        Invalid POSTs return an error status code and JSON of the form
        {"result": "error", "msg": "reason"}.
        """
        self.assertEqual(self.get_json_error(result, status_code=status_code), msg)

    def assert_length(self, queries, count, exact=False):
        # type: (Sized, int, bool) -> None
        actual_count = len(queries)
        if exact:
            return self.assertTrue(actual_count == count,
                                   "len(%s) == %s, != %s" % (queries, actual_count, count))
        return self.assertTrue(actual_count <= count,
                               "len(%s) == %s, > %s" % (queries, actual_count, count))

    def assert_json_error_contains(self, result, msg_substring, status_code=400):
        # type: (HttpResponse, str, int) -> None
        self.assertIn(msg_substring, self.get_json_error(result, status_code=status_code))

    def fixture_data(self, type, action, file_type='json'):
        # type: (text_type, text_type, text_type) -> text_type
        return force_text(open(os.path.join(os.path.dirname(__file__),
                                            "../fixtures/%s/%s_%s.%s" % (type, type, action, file_type))).read())

    # Subscribe to a stream directly
    def subscribe_to_stream(self, email, stream_name, realm=None):
        # type: (text_type, text_type, Optional[Realm]) -> None
        if realm is None:
            realm = get_realm(resolve_email_to_domain(email))
        stream = get_stream(stream_name, realm)
        if stream is None:
            stream, _ = create_stream_if_needed(realm, stream_name)
        user_profile = get_user_profile_by_email(email)
        do_add_subscription(user_profile, stream, no_log=True)

    # Subscribe to a stream by making an API request
    def common_subscribe_to_streams(self, email, streams, extra_post_data={}, invite_only=False):
        # type: (str, Iterable[text_type], Dict[str, Any], bool) -> HttpResponse
        post_data = {'subscriptions': ujson.dumps([{"name": stream} for stream in streams]),
                     'invite_only': ujson.dumps(invite_only)}
        post_data.update(extra_post_data)
        result = self.client.post("/api/v1/users/me/subscriptions", post_data, **self.api_auth(email))
        return result
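
    # Usage sketch (hypothetical): subscribe a user via the API and check the
    # response.
    #
    #     result = self.common_subscribe_to_streams(email, ["some stream"],
    #                                               invite_only=True)
    #     self.assert_json_success(result)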

    def send_json_payload(self, email, url, payload, stream_name=None, **post_params):
        # type: (text_type, text_type, Union[text_type, Dict[str, Any]], Optional[text_type], **Any) -> Message
        if stream_name is not None:
            self.subscribe_to_stream(email, stream_name)

        result = self.client.post(url, payload, **post_params)
        self.assert_json_success(result)

        # Check the correct message was sent
        msg = self.get_last_message()
        self.assertEqual(msg.sender.email, email)
        self.assertEqual(get_display_recipient(msg.recipient), stream_name)

        return msg

    def get_last_message(self):
        # type: () -> Message
        return Message.objects.latest('id')

    def get_second_to_last_message(self):
        # type: () -> Message
        return Message.objects.all().order_by('-id')[1]

def get_all_templates():
    # type: () -> List[str]
    templates = []  # type: List[str]

    relpath = os.path.relpath
    isfile = os.path.isfile
    path_exists = os.path.exists

    is_valid_template = lambda p, n: not n.startswith('.') and isfile(p)

    def process(template_dir, dirname, fnames):
        # type: (str, str, Iterable[str]) -> None
        for name in fnames:
            path = os.path.join(dirname, name)
            if is_valid_template(path, name):
                templates.append(relpath(path, template_dir))

    for engine in loader.engines.all():
        template_dirs = [d for d in engine.template_dirs if path_exists(d)]
        for template_dir in template_dirs:
            template_dir = os.path.normpath(template_dir)
            # os.walk works on both Python 2 and 3; process() only needs the
            # file names found in each directory.
            for dirpath, dirnames, fnames in os.walk(template_dir):
                process(template_dir, dirpath, fnames)

    return templates