2015-11-01 17:14:25 +01:00
|
|
|
from __future__ import absolute_import
|
2016-07-28 01:40:28 +02:00
|
|
|
from __future__ import print_function
|
2016-05-18 20:35:35 +02:00
|
|
|
from contextlib import contextmanager
|
2016-07-03 19:13:25 +02:00
|
|
|
from typing import (cast, Any, Callable, Dict, Generator, Iterable, List, Mapping, Optional,
                    Set, Sized, Tuple, Union)
|
2016-01-25 20:38:44 +01:00
|
|
|
|
2016-09-23 04:23:48 +02:00
|
|
|
from django.conf import settings
|
2014-01-27 23:43:02 +01:00
|
|
|
from django.test import TestCase
|
2016-07-14 00:21:01 +02:00
|
|
|
from django.test.client import (
|
|
|
|
BOUNDARY, MULTIPART_CONTENT, encode_multipart,
|
|
|
|
)
|
2016-05-20 14:53:47 +02:00
|
|
|
from django.template import loader
|
2016-06-03 02:10:13 +02:00
|
|
|
from django.http import HttpResponse
|
2016-05-18 20:35:35 +02:00
|
|
|
from django.utils.translation import ugettext as _
|
2014-01-27 23:43:02 +01:00
|
|
|
|
|
|
|
from zerver.lib.initial_password import initial_password
|
2014-01-27 22:53:36 +01:00
|
|
|
from zerver.lib.db import TimeTrackingCursor
|
2014-01-28 20:03:05 +01:00
|
|
|
from zerver.lib.handlers import allocate_handler_id
|
2016-06-16 01:23:52 +02:00
|
|
|
from zerver.lib.str_utils import force_text
|
2014-01-27 22:53:36 +01:00
|
|
|
from zerver.lib import cache
|
2014-04-24 02:16:53 +02:00
|
|
|
from zerver.lib import event_queue
|
2014-01-27 22:53:36 +01:00
|
|
|
from zerver.worker import queue_processors
|
|
|
|
|
2014-01-27 23:43:02 +01:00
|
|
|
from zerver.lib.actions import (
|
2016-10-20 00:08:03 +02:00
|
|
|
check_send_message, create_stream_if_needed, bulk_add_subscriptions,
|
2016-10-20 16:49:29 +02:00
|
|
|
get_display_recipient, bulk_remove_subscriptions
|
2014-01-27 23:43:02 +01:00
|
|
|
)
|
|
|
|
|
|
|
|
from zerver.models import (
|
2015-10-13 22:54:35 +02:00
|
|
|
get_realm,
|
2016-01-24 02:29:35 +01:00
|
|
|
get_stream,
|
2015-10-13 22:54:35 +02:00
|
|
|
get_user_profile_by_email,
|
2014-01-27 23:43:02 +01:00
|
|
|
resolve_email_to_domain,
|
|
|
|
Client,
|
|
|
|
Message,
|
|
|
|
Realm,
|
|
|
|
Recipient,
|
|
|
|
Stream,
|
|
|
|
Subscription,
|
|
|
|
UserMessage,
|
2016-06-03 02:10:13 +02:00
|
|
|
UserProfile,
|
2014-01-27 23:43:02 +01:00
|
|
|
)
|
|
|
|
|
2016-05-18 20:35:35 +02:00
|
|
|
from zerver.lib.request import JsonableError
|
|
|
|
|
|
|
|
|
2014-01-27 22:53:36 +01:00
|
|
|
import base64
|
2016-09-15 22:05:56 +02:00
|
|
|
import mock
|
2014-01-27 22:53:36 +01:00
|
|
|
import os
|
|
|
|
import re
|
|
|
|
import time
|
|
|
|
import ujson
|
2016-07-12 06:35:50 +02:00
|
|
|
import unittest
|
2016-01-24 03:39:44 +01:00
|
|
|
from six.moves import urllib
|
2016-06-28 07:10:38 +02:00
|
|
|
from six import text_type, binary_type
|
|
|
|
from zerver.lib.str_utils import NonBinaryStr
|
2014-01-27 22:53:36 +01:00
|
|
|
|
|
|
|
from contextlib import contextmanager
|
2015-11-01 17:14:25 +01:00
|
|
|
import six
|
2014-01-27 22:53:36 +01:00
|
|
|
|
2016-06-28 22:20:38 +02:00
|
|
|
# Cache of email -> API key, shared across the test suite so repeated
# authenticated requests don't refetch the UserProfile each time.
API_KEYS = {} # type: Dict[text_type, text_type]

# Decorator for tests that are known not to pass yet under Python 3.
skip_py3 = unittest.skipIf(six.PY3, "Expected failure on Python 3")
|
|
|
|
|
2016-09-15 22:05:56 +02:00
|
|
|
|
2014-01-27 22:53:36 +01:00
|
|
|
@contextmanager
def simulated_queue_client(client):
    # type: (type) -> Generator[None, None, None]
    """Temporarily replace the SimpleQueueClient class used by the queue
    processors with ``client`` (typically a fake), restoring the real
    class on exit.

    The restoration happens in a ``finally`` block so that one failing
    test body cannot leave the fake installed for later tests.
    """
    real_SimpleQueueClient = queue_processors.SimpleQueueClient
    queue_processors.SimpleQueueClient = client  # type: ignore # https://github.com/JukkaL/mypy/issues/1152
    try:
        yield
    finally:
        queue_processors.SimpleQueueClient = real_SimpleQueueClient  # type: ignore # https://github.com/JukkaL/mypy/issues/1152
|
2014-01-27 22:53:36 +01:00
|
|
|
|
|
|
|
@contextmanager
def tornado_redirected_to_list(lst):
    # type: (List[Mapping[str, Any]]) -> Generator[None, None, None]
    """Capture Tornado event-queue notifications into ``lst`` instead of
    delivering them.

    Normal delivery is restored in a ``finally`` block, so an exception
    in the test body cannot leave notifications redirected.
    """
    real_event_queue_process_notification = event_queue.process_notification
    event_queue.process_notification = lst.append
    try:
        yield
    finally:
        event_queue.process_notification = real_event_queue_process_notification
|
2014-01-27 22:53:36 +01:00
|
|
|
|
|
|
|
@contextmanager
def simulated_empty_cache():
    # type: () -> Generator[List[Tuple[str, Union[text_type, List[text_type]], text_type]], None, None]
    """Make every cache lookup miss, while recording the lookups.

    Yields the list of recorded cache queries as
    ``(operation, key_or_keys, cache_name)`` tuples.  The real cache
    functions are restored in a ``finally`` block so a failing test
    cannot leave the cache stubbed out.
    """
    cache_queries = []  # type: List[Tuple[str, Union[text_type, List[text_type]], text_type]]

    def my_cache_get(key, cache_name=None):
        # type: (text_type, Optional[str]) -> Any
        cache_queries.append(('get', key, cache_name))
        return None

    def my_cache_get_many(keys, cache_name=None):
        # type: (List[text_type], Optional[str]) -> Dict[text_type, Any]
        cache_queries.append(('getmany', keys, cache_name))
        # NOTE(review): returns None rather than an empty dict despite the
        # declared type; callers in the test suite appear to tolerate this --
        # confirm before changing.
        return None

    old_get = cache.cache_get
    old_get_many = cache.cache_get_many
    cache.cache_get = my_cache_get
    cache.cache_get_many = my_cache_get_many
    try:
        yield cache_queries
    finally:
        cache.cache_get = old_get
        cache.cache_get_many = old_get_many
|
|
|
|
|
|
|
|
@contextmanager
def queries_captured():
    # type: () -> Generator[List[Dict[str, Union[str, binary_type]]], None, None]
    '''
    Allow a user to capture just the queries executed during
    the with statement.

    Yields a list of dicts with keys 'sql' (the mogrified query text) and
    'time' (duration formatted to milliseconds).  The original cursor
    methods are restored in a ``finally`` block, so a failing test cannot
    leave TimeTrackingCursor instrumented.
    '''
    queries = []  # type: List[Dict[str, Union[str, binary_type]]]

    def wrapper_execute(self, action, sql, params=()):
        # type: (TimeTrackingCursor, Callable, NonBinaryStr, Iterable[Any]) -> None
        # Record timing and SQL even when the query raises, via finally.
        start = time.time()
        try:
            return action(sql, params)
        finally:
            stop = time.time()
            duration = stop - start
            queries.append({
                'sql': self.mogrify(sql, params).decode('utf-8'),
                'time': "%.3f" % duration,
            })

    old_execute = TimeTrackingCursor.execute
    old_executemany = TimeTrackingCursor.executemany

    def cursor_execute(self, sql, params=()):
        # type: (TimeTrackingCursor, NonBinaryStr, Iterable[Any]) -> None
        return wrapper_execute(self, super(TimeTrackingCursor, self).execute, sql, params)  # type: ignore # https://github.com/JukkaL/mypy/issues/1167

    TimeTrackingCursor.execute = cursor_execute  # type: ignore # https://github.com/JukkaL/mypy/issues/1167

    def cursor_executemany(self, sql, params=()):
        # type: (TimeTrackingCursor, NonBinaryStr, Iterable[Any]) -> None
        return wrapper_execute(self, super(TimeTrackingCursor, self).executemany, sql, params)  # type: ignore # https://github.com/JukkaL/mypy/issues/1167

    TimeTrackingCursor.executemany = cursor_executemany  # type: ignore # https://github.com/JukkaL/mypy/issues/1167

    try:
        yield queries
    finally:
        TimeTrackingCursor.execute = old_execute  # type: ignore # https://github.com/JukkaL/mypy/issues/1167
        TimeTrackingCursor.executemany = old_executemany  # type: ignore # https://github.com/JukkaL/mypy/issues/1167
|
2014-01-27 22:53:36 +01:00
|
|
|
|
2014-01-27 23:43:02 +01:00
|
|
|
|
2016-09-13 23:32:35 +02:00
|
|
|
def make_client(name):
    # type: (str) -> Client
    """Return the Client with the given name, creating it on first use."""
    return Client.objects.get_or_create(name=name)[0]
|
|
|
|
|
2014-01-27 23:43:02 +01:00
|
|
|
def find_key_by_email(address):
    # type: (text_type) -> text_type
    """Return the confirmation key from the most recent outgoing email
    addressed to ``address`` (None if no matching email was sent)."""
    from django.core.mail import outbox
    key_regex = re.compile("accounts/do_confirm/([a-f0-9]{40})>")
    # Walk the outbox newest-first so we pick up the latest key.
    for message in reversed(outbox):
        if address not in message.to:
            continue
        return key_regex.search(message.body).group(1)
|
|
|
|
|
2014-01-31 16:44:45 +01:00
|
|
|
def message_ids(result):
    # type: (Dict[str, Any]) -> Set[int]
    """Collect the ids of every message in a messages-API result dict."""
    return {message['id'] for message in result['messages']}
|
|
|
|
|
|
|
|
def message_stream_count(user_profile):
    # type: (UserProfile) -> int
    """Count how many messages the given user has received."""
    query = (UserMessage.objects
             .select_related("message")
             .filter(user_profile=user_profile))
    return query.count()
|
|
|
|
|
|
|
|
def most_recent_usermessage(user_profile):
    # type: (UserProfile) -> UserMessage
    """Return the newest UserMessage row for the given user."""
    rows = (UserMessage.objects
            .select_related("message")
            .filter(user_profile=user_profile)
            .order_by('-message'))
    return rows[0]  # Django does LIMIT here
|
|
|
|
|
|
|
|
def most_recent_message(user_profile):
    # type: (UserProfile) -> Message
    """Return the newest Message received by the given user."""
    return most_recent_usermessage(user_profile).message
|
|
|
|
|
|
|
|
def get_user_messages(user_profile):
    # type: (UserProfile) -> List[Message]
    """Return every message received by the user, oldest first."""
    rows = (UserMessage.objects
            .select_related("message")
            .filter(user_profile=user_profile)
            .order_by('message'))
    return [row.message for row in rows]
|
|
|
|
|
|
|
|
class DummyHandler(object):
    """Stand-in for a Tornado request handler: it does nothing except
    register itself, so code that looks handlers up by id can find it."""

    def __init__(self):
        # type: () -> None
        # NOTE: the original type comment claimed a Callable parameter,
        # but __init__ takes no arguments beyond self.
        allocate_handler_id(self)
|
2014-01-31 16:44:45 +01:00
|
|
|
|
|
|
|
class POSTRequestMock(object):
    """Minimal fake of a Django POST request, for calling views and
    Tornado handlers directly in tests."""

    method = "POST"

    def __init__(self, post_data, user_profile):
        # type: (Dict[str, Any], UserProfile) -> None
        self.user = user_profile
        # Both attributes alias the same dict, mirroring Django's request.
        self.POST = post_data
        self.REQUEST = post_data
        self.META = {'PATH_INFO': 'test'}
        self._log_data = {}  # type: Dict[str, Any]
        self._tornado_handler = DummyHandler()
|
2014-01-27 23:43:02 +01:00
|
|
|
|
2016-09-28 06:06:21 +02:00
|
|
|
class HostRequestMock(object):
    """A mock request object where get_host() works. Useful for testing
    routes that use Zulip's subdomains feature"""

    def __init__(self, host=settings.EXTERNAL_HOST):
        # type: (text_type) -> None
        # Defaults to the server's configured external hostname.
        self.host = host

    def get_host(self):
        # type: () -> text_type
        # Mirrors django.http.HttpRequest.get_host().
        return self.host
|
|
|
|
|
2016-07-28 01:40:28 +02:00
|
|
|
# Whether to record per-URL coverage data for every test-client request;
# enabled by setting TEST_INSTRUMENT_URL_COVERAGE=TRUE in the environment.
INSTRUMENTING = os.environ.get('TEST_INSTRUMENT_URL_COVERAGE', '') == 'TRUE'
# Accumulated records of instrumented requests (url, status, timing, etc.).
INSTRUMENTED_CALLS = [] # type: List[Dict[str, Any]]

# Signature shared by the client_* wrapper methods below.
UrlFuncT = Callable[..., HttpResponse] # TODO: make more specific
|
|
|
|
|
2016-07-28 01:40:28 +02:00
|
|
|
def instrument_url(f):
    # type: (UrlFuncT) -> UrlFuncT
    """Decorator for the test client wrappers that records URL coverage
    data (url, status code, timing, test name) when instrumentation is
    enabled; otherwise it returns ``f`` unchanged."""
    if not INSTRUMENTING:
        return f

    def wrapper(self, url, info={}, **kwargs):
        # type: (Any, text_type, Dict[str, Any], **Any) -> HttpResponse
        start = time.time()
        result = f(self, url, info, **kwargs)
        delay = time.time() - start
        test_name = self.id()
        # Split any query string off the url; partition leaves the url
        # untouched (and extra_info empty) when there is no '?'.
        url, _, extra_info = url.partition('?')

        INSTRUMENTED_CALLS.append(dict(
            url=url,
            status_code=result.status_code,
            method=f.__name__,
            delay=delay,
            extra_info=extra_info,
            info=info,
            test_name=test_name,
            kwargs=kwargs))
        return result
    return wrapper
|
|
|
|
|
2016-07-28 02:40:04 +02:00
|
|
|
def write_instrumentation_reports():
    # type: () -> None
    """Dump the URL coverage data collected by instrument_url() to
    var/url_coverage.txt, then report any urlpatterns no test touched
    in var/untested_url_report.txt.  No-op unless INSTRUMENTING is set."""
    if INSTRUMENTING:
        calls = INSTRUMENTED_CALLS
        var_dir = 'var' # TODO make sure path is robust here
        fn = os.path.join(var_dir, 'url_coverage.txt')
        with open(fn, 'w') as f:
            # One JSON record per instrumented request.
            for call in calls:
                try:
                    line = ujson.dumps(call)
                    f.write(line + '\n')
                except OverflowError:
                    print('''
                        A JSON overflow error was encountered while
                        producing the URL coverage report. Sometimes
                        this indicates that a test is passing objects
                        into methods like client_post(), which is
                        unnecessary and leads to false positives.
                        ''')
                    print(call)

        print('URL coverage report is in %s' % (fn,))
        print('Try running: ./tools/analyze-url-coverage')

        # Find our untested urls.
        # Imported here to avoid a circular import at module load time.
        from zproject.urls import urlpatterns
        untested_patterns = []
        for pattern in urlpatterns:
            for call in calls:
                url = call['url']
                # urlpattern regexes are rooted without a leading slash.
                if url.startswith('/'):
                    url = url[1:]
                if pattern.regex.match(url):
                    break
            else:
                # No recorded call matched this pattern (loop ran to
                # completion without break), so it is untested.
                untested_patterns.append(pattern.regex.pattern)

        fn = os.path.join(var_dir, 'untested_url_report.txt')
        with open(fn, 'w') as f:
            f.write('untested urls\n')
            for untested_pattern in sorted(untested_patterns):
                f.write(' %s\n' % (untested_pattern,))
        print('Untested-url report is in %s' % (fn,))
|
|
|
|
|
|
|
|
|
2016-08-23 02:08:42 +02:00
|
|
|
class ZulipTestCase(TestCase):
    '''
    WRAPPER_COMMENT:

    We wrap calls to self.client.{patch,put,get,post,delete} for various
    reasons. Some of this has to do with fixing encodings before calling
    into the Django code. Some of this has to do with providing a future
    path for instrumentation. Some of it's just consistency.

    The linter will prevent direct calls to self.client.foo, so the wrapper
    functions have to fake out the linter by using a local variable called
    django_client to fool the regex.
    '''

    @instrument_url
    def client_patch(self, url, info={}, **kwargs):
        # type: (text_type, Dict[str, Any], **Any) -> HttpResponse
        """
        We need to urlencode, since Django's function won't do it for us.
        """
        encoded = urllib.parse.urlencode(info)
        django_client = self.client # see WRAPPER_COMMENT
        return django_client.patch(url, encoded, **kwargs)

    @instrument_url
    def client_patch_multipart(self, url, info={}, **kwargs):
        # type: (text_type, Dict[str, Any], **Any) -> HttpResponse
        """
        Use this for patch requests that have file uploads or
        that need some sort of multi-part content. In the future
        Django's test client may become a bit more flexible,
        so we can hopefully eliminate this. (When you post
        with the Django test client, it deals with MULTIPART_CONTENT
        automatically, but not patch.)
        """
        encoded = encode_multipart(BOUNDARY, info)
        django_client = self.client # see WRAPPER_COMMENT
        return django_client.patch(
            url,
            encoded,
            content_type=MULTIPART_CONTENT,
            **kwargs)

    @instrument_url
    def client_put(self, url, info={}, **kwargs):
        # type: (text_type, Dict[str, Any], **Any) -> HttpResponse
        # urlencode the body; Django's test client won't do it for PUT.
        encoded = urllib.parse.urlencode(info)
        django_client = self.client # see WRAPPER_COMMENT
        return django_client.put(url, encoded, **kwargs)

    @instrument_url
    def client_delete(self, url, info={}, **kwargs):
        # type: (text_type, Dict[str, Any], **Any) -> HttpResponse
        # urlencode the body; Django's test client won't do it for DELETE.
        encoded = urllib.parse.urlencode(info)
        django_client = self.client # see WRAPPER_COMMENT
        return django_client.delete(url, encoded, **kwargs)

    @instrument_url
    def client_post(self, url, info={}, **kwargs):
        # type: (text_type, Dict[str, Any], **Any) -> HttpResponse
        # POST already handles encoding in the Django test client.
        django_client = self.client # see WRAPPER_COMMENT
        return django_client.post(url, info, **kwargs)

    @instrument_url
    def client_get(self, url, info={}, **kwargs):
        # type: (text_type, Dict[str, Any], **Any) -> HttpResponse
        django_client = self.client # see WRAPPER_COMMENT
        return django_client.get(url, info, **kwargs)

    def login_with_return(self, email, password=None):
        # type: (text_type, Optional[text_type]) -> HttpResponse
        """Log in via the login form, returning the HttpResponse.

        If `password` is omitted, the user's deterministic test-suite
        password is used.
        """
        if password is None:
            password = initial_password(email)
        return self.client_post('/accounts/login/',
                                {'username': email, 'password': password})

    def login(self, email, password=None, fails=False):
        # type: (text_type, Optional[text_type], bool) -> None
        """Log in via the test client session, asserting success (or
        failure if `fails` is True).

        Note: returns None; the previous HttpResponse return annotation
        was incorrect (assertTrue/assertFalse return nothing).
        """
        if password is None:
            password = initial_password(email)
        if not fails:
            self.assertTrue(self.client.login(username=email, password=password))
        else:
            self.assertFalse(self.client.login(username=email, password=password))

    def register(self, username, password, domain="zulip.com"):
        # type: (text_type, text_type, text_type) -> HttpResponse
        """Run the full two-step registration flow for a new user."""
        self.client_post('/accounts/home/',
                         {'email': username + "@" + domain})
        return self.submit_reg_form_for_user(username, password, domain=domain)

    def submit_reg_form_for_user(self, username, password, domain="zulip.com",
                                 realm_name=None, realm_subdomain=None,
                                 realm_org_type=Realm.COMMUNITY, **kwargs):
        # type: (text_type, text_type, text_type, Optional[text_type], Optional[text_type], int, **Any) -> HttpResponse
        """
        Stage two of the two-step registration process.

        If things are working correctly the account should be fully
        registered after this call.

        You can pass the HTTP_HOST variable for subdomains via kwargs.
        """
        return self.client_post('/accounts/register/',
                                {'full_name': username, 'password': password,
                                 'realm_name': realm_name,
                                 'realm_subdomain': realm_subdomain,
                                 'key': find_key_by_email(username + '@' + domain),
                                 'realm_org_type': realm_org_type,
                                 'terms': True},
                                **kwargs)

    def get_confirmation_url_from_outbox(self, email_address, path_pattern=r"(\S+)>"):
        # type: (text_type, text_type) -> text_type
        """Extract the confirmation URL from the most recent email sent
        to `email_address`; raises ValueError if none was sent."""
        from django.core.mail import outbox
        for message in reversed(outbox):
            if email_address in message.to:
                return re.search(settings.EXTERNAL_HOST + path_pattern,
                                 message.body).groups()[0]
        else:
            # for/else: only reached when no message matched.
            raise ValueError("Couldn't find a confirmation email.")

    def get_api_key(self, email):
        # type: (text_type) -> text_type
        # Memoized in the module-level API_KEYS dict to avoid repeated
        # database lookups across tests.
        if email not in API_KEYS:
            API_KEYS[email] = get_user_profile_by_email(email).api_key
        return API_KEYS[email]

    def api_auth(self, email):
        # type: (text_type) -> Dict[str, text_type]
        """Return HTTP Basic auth headers for the given user's API key."""
        credentials = u"%s:%s" % (email, self.get_api_key(email))
        return {
            'HTTP_AUTHORIZATION': u'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
        }

    def get_streams(self, email):
        # type: (text_type) -> List[text_type]
        """
        Helper function to get the stream names for a user
        """
        user_profile = get_user_profile_by_email(email)
        subs = Subscription.objects.filter(
            user_profile=user_profile,
            active=True,
            recipient__type=Recipient.STREAM)
        return [cast(text_type, get_display_recipient(sub.recipient)) for sub in subs]

    def send_message(self, sender_name, raw_recipients, message_type,
                     content=u"test content", subject=u"test", **kwargs):
        # type: (text_type, Union[text_type, List[text_type]], int, text_type, text_type, **Any) -> int
        """Send a message through the normal send path; returns the
        message id.  `raw_recipients` may be one address or a list."""
        sender = get_user_profile_by_email(sender_name)
        if message_type == Recipient.PERSONAL:
            message_type_name = "private"
        else:
            message_type_name = "stream"
        if isinstance(raw_recipients, six.string_types):
            recipient_list = [raw_recipients]
        else:
            recipient_list = raw_recipients
        (sending_client, _) = Client.objects.get_or_create(name="test suite")

        return check_send_message(
            sender, sending_client, message_type_name, recipient_list, subject,
            content, forged=False, forged_timestamp=None,
            forwarder_user_profile=sender, realm=sender.realm, **kwargs)

    def get_old_messages(self, anchor=1, num_before=100, num_after=100):
        # type: (int, int, int) -> List[Dict[str, Any]]
        """Fetch messages via the JSON API around the given anchor."""
        post_params = {"anchor": anchor, "num_before": num_before,
                       "num_after": num_after}
        result = self.client_get("/json/messages", dict(post_params))
        data = ujson.loads(result.content)
        return data['messages']

    def users_subscribed_to_stream(self, stream_name, realm_domain):
        # type: (text_type, text_type) -> List[UserProfile]
        """Return the UserProfiles actively subscribed to a stream."""
        realm = get_realm(realm_domain)
        stream = Stream.objects.get(name=stream_name, realm=realm)
        recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
        subscriptions = Subscription.objects.filter(recipient=recipient, active=True)

        return [subscription.user_profile for subscription in subscriptions]

    def assert_json_success(self, result):
        # type: (HttpResponse) -> Dict[str, Any]
        """
        Successful POSTs return a 200 and JSON of the form {"result": "success",
        "msg": ""}.
        """
        self.assertEqual(result.status_code, 200, result)
        json = ujson.loads(result.content)
        self.assertEqual(json.get("result"), "success")
        # We have a msg key for consistency with errors, but it typically has an
        # empty value.
        self.assertIn("msg", json)
        return json

    def get_json_error(self, result, status_code=400):
        # type: (HttpResponse, int) -> Dict[str, Any]
        """Assert the response is an error and return its message."""
        self.assertEqual(result.status_code, status_code)
        json = ujson.loads(result.content)
        self.assertEqual(json.get("result"), "error")
        return json['msg']

    def assert_json_error(self, result, msg, status_code=400):
        # type: (HttpResponse, text_type, int) -> None
        """
        Invalid POSTs return an error status code and JSON of the form
        {"result": "error", "msg": "reason"}.
        """
        self.assertEqual(self.get_json_error(result, status_code=status_code), msg)

    def assert_length(self, queries, count):
        # type: (Sized, int) -> None
        """Assert that len(queries) == count, with a readable message."""
        actual_count = len(queries)
        self.assertTrue(actual_count == count,
                        "len(%s) == %s, != %s" % (queries, actual_count, count))

    def assert_max_length(self, queries, count):
        # type: (Sized, int) -> None
        """Assert that len(queries) <= count, with a readable message."""
        actual_count = len(queries)
        self.assertTrue(actual_count <= count,
                        "len(%s) == %s, > %s" % (queries, actual_count, count))

    def assert_json_error_contains(self, result, msg_substring, status_code=400):
        # type: (HttpResponse, text_type, int) -> None
        self.assertIn(msg_substring, self.get_json_error(result, status_code=status_code))

    def assert_equals_response(self, string, response):
        # type: (text_type, HttpResponse) -> None
        self.assertEqual(string, response.content.decode('utf-8'))

    def assert_in_response(self, substring, response):
        # type: (text_type, HttpResponse) -> None
        self.assertIn(substring, response.content.decode('utf-8'))

    def fixture_data(self, type, action, file_type='json'):
        # type: (text_type, text_type, text_type) -> text_type
        """Read a fixture file from zerver/fixtures/<type>/.

        (`type` shadows the builtin, but renaming it would break callers
        that pass it by keyword.)
        """
        fn = os.path.join(os.path.dirname(__file__),
                          "../fixtures/%s/%s_%s.%s" % (type, type, action, file_type))
        # Use a context manager so the file handle is closed promptly;
        # the previous version leaked it until garbage collection.
        with open(fn) as f:
            return force_text(f.read())

    # Subscribe to a stream directly
    def subscribe_to_stream(self, email, stream_name, realm=None):
        # type: (text_type, text_type, Optional[Realm]) -> None
        if realm is None:
            realm = get_realm(resolve_email_to_domain(email))
        stream = get_stream(stream_name, realm)
        if stream is None:
            stream, _ = create_stream_if_needed(realm, stream_name)
        user_profile = get_user_profile_by_email(email)
        bulk_add_subscriptions([stream], [user_profile])

    def unsubscribe_from_stream(self, email, stream_name):
        # type: (text_type, text_type) -> None
        user_profile = get_user_profile_by_email(email)
        stream = get_stream(stream_name, user_profile.realm)
        bulk_remove_subscriptions([user_profile], [stream])

    # Subscribe to a stream by making an API request
    def common_subscribe_to_streams(self, email, streams, extra_post_data={}, invite_only=False):
        # type: (text_type, Iterable[text_type], Dict[str, Any], bool) -> HttpResponse
        post_data = {'subscriptions': ujson.dumps([{"name": stream} for stream in streams]),
                     'invite_only': ujson.dumps(invite_only)}
        post_data.update(extra_post_data)
        result = self.client_post("/api/v1/users/me/subscriptions", post_data, **self.api_auth(email))
        return result

    def send_json_payload(self, email, url, payload, stream_name=None, **post_params):
        # type: (text_type, text_type, Union[text_type, Dict[str, Any]], Optional[text_type], **Any) -> Message
        """POST a (webhook-style) payload and return the message it
        produced, asserting success along the way."""
        if stream_name is not None:
            self.subscribe_to_stream(email, stream_name)

        result = self.client_post(url, payload, **post_params)
        self.assert_json_success(result)

        # Check the correct message was sent
        msg = self.get_last_message()
        self.assertEqual(msg.sender.email, email)
        if stream_name is not None:
            self.assertEqual(get_display_recipient(msg.recipient), stream_name)
        # TODO: should also validate recipient for private messages

        return msg

    def get_last_message(self):
        # type: () -> Message
        return Message.objects.latest('id')

    def get_second_to_last_message(self):
        # type: () -> Message
        return Message.objects.all().order_by('-id')[1]

    @contextmanager
    def simulated_markdown_failure(self):
        # type: () -> Generator[None, None, None]
        '''
        This raises a failure inside of the try/except block of
        bugdown.__init__.do_convert.
        '''
        with \
                self.settings(ERROR_BOT=None), \
                mock.patch('zerver.lib.bugdown.timeout', side_effect=KeyError('foo')), \
                mock.patch('zerver.lib.bugdown.log_bugdown_error'):
            yield
|
2016-09-20 00:18:07 +02:00
|
|
|
class WebhookTestCase(ZulipTestCase):
    """
    Shared base class for all webhook tests.

    Override the class attributes below and call send_and_test_stream_message
    (or send_and_test_private_message) from your test methods.

    If your webhook URL is built in an uncommon way, override
    build_webhook_url.  If you need to modify the request body, or build it
    without using a fixture file, override get_body.
    """
    # Stream the test messages are sent to; subclasses must set this for
    # stream-based webhooks.
    STREAM_NAME = None # type: Optional[text_type]
    TEST_USER_EMAIL = 'webhook-bot@zulip.com'
    # Format string with {stream} and {api_key} placeholders; subclasses
    # must set this (see build_webhook_url).
    URL_TEMPLATE = None # type: Optional[text_type]
    # Directory (under the fixtures root) holding this webhook's fixtures.
    FIXTURE_DIR_NAME = None # type: Optional[text_type]

    def setUp(self):
        # type: () -> None
        self.url = self.build_webhook_url()

    def send_and_test_stream_message(self, fixture_name, expected_subject=None,
                                     expected_message=None, content_type="application/json", **kwargs):
        # type: (text_type, Optional[text_type], Optional[text_type], Optional[text_type], **Any) -> Message
        """Send the named fixture's payload to self.url and verify that the
        resulting stream message has the expected subject and content
        (each check is skipped when its expectation is None)."""
        payload = self.get_body(fixture_name)
        if content_type is not None:
            kwargs['content_type'] = content_type
        msg = self.send_json_payload(self.TEST_USER_EMAIL, self.url, payload,
                                     self.STREAM_NAME, **kwargs)
        self.do_test_subject(msg, expected_subject)
        self.do_test_message(msg, expected_message)

        return msg

    def send_and_test_private_message(self, fixture_name, expected_subject=None,
                                      expected_message=None, content_type="application/json", **kwargs):
        # Fixed type comment: parameters default to None / "application/json",
        # so they must be Optional, matching send_and_test_stream_message.
        # type: (text_type, Optional[text_type], Optional[text_type], Optional[text_type], **Any) -> Message
        """Send the named fixture's payload to self.url and verify that the
        resulting private message has the expected content.

        NOTE: expected_subject is accepted only for signature symmetry with
        send_and_test_stream_message; private messages have no subject, so
        it is never validated here.
        """
        payload = self.get_body(fixture_name)
        if content_type is not None:
            kwargs['content_type'] = content_type

        msg = self.send_json_payload(self.TEST_USER_EMAIL, self.url, payload,
                                     stream_name=None, **kwargs)
        self.do_test_message(msg, expected_message)

        return msg

    def build_webhook_url(self):
        # type: () -> text_type
        """Construct the webhook URL from URL_TEMPLATE, filling in the
        stream name and the test user's API key."""
        api_key = self.get_api_key(self.TEST_USER_EMAIL)
        return self.URL_TEMPLATE.format(stream=self.STREAM_NAME, api_key=api_key)

    def get_body(self, fixture_name):
        # type: (text_type) -> Union[text_type, Dict[str, text_type]]
        """Can be implemented either as returning a dictionary containing the
        post parameters or as string containing the body of the request.

        The default round-trips the fixture through ujson, which both
        validates it as JSON and normalizes its formatting."""
        return ujson.dumps(ujson.loads(self.fixture_data(self.FIXTURE_DIR_NAME, fixture_name)))

    def do_test_subject(self, msg, expected_subject):
        # type: (Message, Optional[text_type]) -> None
        """Assert the message's topic matches; a None expectation skips the check."""
        if expected_subject is not None:
            self.assertEqual(msg.topic_name(), expected_subject)

    def do_test_message(self, msg, expected_message):
        # type: (Message, Optional[text_type]) -> None
        """Assert the message's content matches; a None expectation skips the check."""
        if expected_message is not None:
            self.assertEqual(msg.content, expected_message)
|
2016-09-15 22:05:56 +02:00
|
|
|
|
2016-05-20 14:53:47 +02:00
|
|
|
def get_all_templates():
    # type: () -> List[str]
    """Return the paths (relative to their template directory) of every
    valid template file known to any configured Django template engine."""

    def usable(full_path, fname):
        # type: (text_type, text_type) -> bool
        # Skip hidden files, __init__ stubs, and anything that is not a
        # regular file.
        if fname.startswith('.') or fname.startswith('__init__'):
            return False
        return os.path.isfile(full_path)

    templates = []  # type: List[str]
    for engine in loader.engines.all():
        existing_dirs = [d for d in engine.template_dirs if os.path.exists(d)]
        for template_dir in existing_dirs:
            template_dir = os.path.normpath(template_dir)
            for dirpath, dirnames, fnames in os.walk(template_dir):
                for fname in fnames:
                    full_path = os.path.join(dirpath, fname)
                    if usable(full_path, fname):
                        templates.append(os.path.relpath(full_path, template_dir))

    return templates
|