2015-11-01 17:14:25 +01:00
from __future__ import absolute_import
2016-07-28 01:40:28 +02:00
from __future__ import print_function
2016-05-18 20:35:35 +02:00
from contextlib import contextmanager
2016-11-24 19:12:55 +01:00
from typing import ( cast , Any , Callable , Dict , Generator , Iterable , Iterator , List , Mapping ,
2017-03-03 19:01:52 +01:00
Optional , Set , Sized , Tuple , Union , IO )
2016-01-25 20:38:44 +01:00
2017-04-19 10:04:23 +02:00
from django . core import signing
2016-11-24 19:45:40 +01:00
from django . core . urlresolvers import LocaleRegexURLResolver
2016-09-23 04:23:48 +02:00
from django . conf import settings
2014-01-27 23:43:02 +01:00
from django . test import TestCase
2016-07-14 00:21:01 +02:00
from django . test . client import (
BOUNDARY , MULTIPART_CONTENT , encode_multipart ,
)
2016-05-20 14:53:47 +02:00
from django . template import loader
2016-06-03 02:10:13 +02:00
from django . http import HttpResponse
2016-10-21 22:13:23 +02:00
from django . db . utils import IntegrityError
2014-01-27 23:43:02 +01:00
2016-12-19 08:48:03 +01:00
from zerver . lib . avatar import avatar_url
2017-02-13 09:19:52 +01:00
from zerver . lib . cache import get_cache_backend
2014-01-27 23:43:02 +01:00
from zerver . lib . initial_password import initial_password
2014-01-27 22:53:36 +01:00
from zerver . lib . db import TimeTrackingCursor
2016-06-16 01:23:52 +02:00
from zerver . lib . str_utils import force_text
2014-01-27 22:53:36 +01:00
from zerver . lib import cache
2016-11-27 06:56:06 +01:00
from zerver . tornado import event_queue
2016-11-27 06:36:06 +01:00
from zerver . tornado . handlers import allocate_handler_id
2014-01-27 22:53:36 +01:00
from zerver . worker import queue_processors
2014-01-27 23:43:02 +01:00
from zerver . lib . actions import (
2016-10-20 00:08:03 +02:00
check_send_message , create_stream_if_needed , bulk_add_subscriptions ,
2016-10-20 16:49:29 +02:00
get_display_recipient , bulk_remove_subscriptions
2014-01-27 23:43:02 +01:00
)
from zerver . models import (
2017-01-30 04:31:24 +01:00
get_recipient ,
2016-01-24 02:29:35 +01:00
get_stream ,
2015-10-13 22:54:35 +02:00
get_user_profile_by_email ,
2014-01-27 23:43:02 +01:00
Client ,
Message ,
Realm ,
Recipient ,
Stream ,
Subscription ,
UserMessage ,
2016-06-03 02:10:13 +02:00
UserProfile ,
2014-01-27 23:43:02 +01:00
)
2016-05-18 20:35:35 +02:00
from zerver . lib . request import JsonableError
2016-11-24 19:45:40 +01:00
import collections
2014-01-27 22:53:36 +01:00
import base64
2016-09-15 22:05:56 +02:00
import mock
2014-01-27 22:53:36 +01:00
import os
import re
2016-11-19 01:28:28 +01:00
import sys
2014-01-27 22:53:36 +01:00
import time
import ujson
2016-07-12 06:35:50 +02:00
import unittest
2016-01-24 03:39:44 +01:00
from six . moves import urllib
2016-12-21 13:17:53 +01:00
from six import binary_type
from typing import Text
2016-06-28 07:10:38 +02:00
from zerver . lib . str_utils import NonBinaryStr
2014-01-27 22:53:36 +01:00
from contextlib import contextmanager
2015-11-01 17:14:25 +01:00
import six
2016-12-13 10:59:54 +01:00
import fakeldap
import ldap
class MockLDAP(fakeldap.MockLDAP):
    """fakeldap.MockLDAP subclass that additionally exposes, as class
    attributes, subclasses of the real `ldap` module's exception types.

    This lets test code that patches in a MockLDAP still raise/catch the
    same exception classes (LDAPError, INVALID_CREDENTIALS, etc.) that
    code using the real `ldap` module works with.
    """
    class LDAPError(ldap.LDAPError):
        pass

    class INVALID_CREDENTIALS(ldap.INVALID_CREDENTIALS):
        pass

    class NO_SUCH_OBJECT(ldap.NO_SUCH_OBJECT):
        pass

    class ALREADY_EXISTS(ldap.ALREADY_EXISTS):
        pass
2014-01-27 22:53:36 +01:00
@contextmanager
def simulated_queue_client(client):
    # type: (type) -> Iterator[None]
    """Monkey-patch queue_processors.SimpleQueueClient with `client` for
    the duration of the `with` block.

    The real class is restored in a `finally` so that an exception
    raised by the test body cannot leak the mock into later tests.
    """
    real_SimpleQueueClient = queue_processors.SimpleQueueClient
    queue_processors.SimpleQueueClient = client  # type: ignore # https://github.com/JukkaL/mypy/issues/1152
    try:
        yield
    finally:
        queue_processors.SimpleQueueClient = real_SimpleQueueClient  # type: ignore # https://github.com/JukkaL/mypy/issues/1152
2014-01-27 22:53:36 +01:00
@contextmanager
def tornado_redirected_to_list(lst):
    # type: (List[Mapping[str, Any]]) -> Iterator[None]
    """Capture Tornado event-queue notifications into `lst` instead of
    delivering them, for the duration of the `with` block.
    """
    real_event_queue_process_notification = event_queue.process_notification
    # process_notification takes a single parameter called 'notice'.
    # lst.append takes a single argument called 'object'.
    # Some code might call process_notification using keyword arguments,
    # so mypy doesn't allow assigning lst.append to process_notification
    # So explicitly change parameter name to 'notice' to work around this problem
    event_queue.process_notification = lambda notice: lst.append(notice)
    try:
        yield
    finally:
        # Restore even if the test body raised, so one failing test
        # cannot leave notifications redirected for later tests.
        event_queue.process_notification = real_event_queue_process_notification
2014-01-27 22:53:36 +01:00
@contextmanager
def simulated_empty_cache():
    # type: () -> Generator[List[Tuple[str, Union[Text, List[Text]], Text]], None, None]
    """Patch cache_get/cache_get_many to always miss, recording each call.

    Yields the list of (operation, key(s), cache_name) tuples that were
    attempted; the real cache functions are restored in a `finally`.
    """
    cache_queries = []  # type: List[Tuple[str, Union[Text, List[Text]], Text]]

    def my_cache_get(key, cache_name=None):
        # type: (Text, Optional[str]) -> Optional[Dict[Text, Any]]
        cache_queries.append(('get', key, cache_name))
        return None

    def my_cache_get_many(keys, cache_name=None):  # nocoverage -- simulated code doesn't use this
        # type: (List[Text], Optional[str]) -> Dict[Text, Any]
        cache_queries.append(('getmany', keys, cache_name))
        return {}

    old_get = cache.cache_get
    old_get_many = cache.cache_get_many
    cache.cache_get = my_cache_get
    cache.cache_get_many = my_cache_get_many
    try:
        yield cache_queries
    finally:
        # Restore the real cache functions even on exception, so a
        # failing test cannot disable caching for the rest of the run.
        cache.cache_get = old_get
        cache.cache_get_many = old_get_many
@contextmanager
def queries_captured(include_savepoints=False):
    # type: (Optional[bool]) -> Generator[List[Dict[str, Union[str, binary_type]]], None, None]
    '''
    Allow a user to capture just the queries executed during
    the with statement.

    Yields a list of {'sql': ..., 'time': ...} dicts, one per query.
    SAVEPOINT statements are skipped unless include_savepoints is True.
    The patched cursor methods are restored in a `finally`, so an
    exception in the test body cannot corrupt later tests.
    '''
    queries = []  # type: List[Dict[str, Union[str, binary_type]]]

    def wrapper_execute(self, action, sql, params=()):
        # type: (TimeTrackingCursor, Callable, NonBinaryStr, Iterable[Any]) -> None
        # Clear the cache before each query so the captured queries
        # reflect actual database traffic rather than cached results.
        cache = get_cache_backend(None)
        cache.clear()
        start = time.time()
        try:
            return action(sql, params)
        finally:
            stop = time.time()
            duration = stop - start
            if include_savepoints or ('SAVEPOINT' not in sql):
                queries.append({
                    'sql': self.mogrify(sql, params).decode('utf-8'),
                    'time': "%.3f" % duration,
                })

    old_execute = TimeTrackingCursor.execute
    old_executemany = TimeTrackingCursor.executemany

    def cursor_execute(self, sql, params=()):
        # type: (TimeTrackingCursor, NonBinaryStr, Iterable[Any]) -> None
        return wrapper_execute(self, super(TimeTrackingCursor, self).execute, sql, params)  # type: ignore # https://github.com/JukkaL/mypy/issues/1167

    TimeTrackingCursor.execute = cursor_execute  # type: ignore # https://github.com/JukkaL/mypy/issues/1167

    def cursor_executemany(self, sql, params=()):
        # type: (TimeTrackingCursor, NonBinaryStr, Iterable[Any]) -> None
        return wrapper_execute(self, super(TimeTrackingCursor, self).executemany, sql, params)  # type: ignore # https://github.com/JukkaL/mypy/issues/1167 # nocoverage -- doesn't actually get used in tests

    TimeTrackingCursor.executemany = cursor_executemany  # type: ignore # https://github.com/JukkaL/mypy/issues/1167

    try:
        yield queries
    finally:
        TimeTrackingCursor.execute = old_execute  # type: ignore # https://github.com/JukkaL/mypy/issues/1167
        TimeTrackingCursor.executemany = old_executemany  # type: ignore # https://github.com/JukkaL/mypy/issues/1167
2014-01-27 22:53:36 +01:00
2017-02-09 22:58:43 +01:00
@contextmanager
def stdout_suppressed():
    # type: () -> Iterator[IO[str]]
    """Redirect stdout to /dev/null, yielding the original stdout stream.

    The original stream is restored in a `finally`; previously, an
    exception in the body left sys.stdout pointing at the (then-closed)
    devnull handle.
    """
    with open(os.devnull, 'a') as devnull:
        stdout, sys.stdout = sys.stdout, devnull  # type: ignore
        try:
            yield stdout
        finally:
            sys.stdout = stdout
2016-12-19 08:48:03 +01:00
def get_test_image_file(filename):
    # type: (str) -> IO[Any]
    """Open (in binary mode) one of the fixture images in ../tests/images
    by its bare file name."""
    images_dir = os.path.join(os.path.dirname(__file__), '../tests/images')
    return open(os.path.join(os.path.abspath(images_dir), filename), 'rb')
def avatar_disk_path(user_profile, medium=False):
    # type: (UserProfile, bool) -> str
    """Map a user's avatar URL onto its file path under
    LOCAL_UPLOADS_DIR/avatars."""
    url_path = avatar_url(user_profile, medium)
    pieces = url_path.split("/")
    # The last two URL components are <dir>/<name>?<query>; drop the
    # query string to recover the on-disk file name.
    file_name = pieces[-1].split("?")[0]
    return os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars",
                        pieces[-2], file_name)
2014-01-27 23:43:02 +01:00
2016-09-13 23:32:35 +02:00
def make_client(name):
    # type: (str) -> Client
    """Return the Client with the given name, creating it if needed."""
    return Client.objects.get_or_create(name=name)[0]
2014-01-27 23:43:02 +01:00
def find_key_by_email(address):
    # type: (Text) -> Optional[Text]
    """Return the confirmation key from the newest outgoing email sent
    to `address`, or None if no email was sent to that address."""
    from django.core.mail import outbox
    key_regex = re.compile("accounts/do_confirm/([a-f0-9]{40})>")
    for message in reversed(outbox):
        if address not in message.to:
            continue
        return key_regex.search(message.body).groups()[0]
    return None  # nocoverage -- in theory a test might want this case, but none do
2014-01-27 23:43:02 +01:00
2017-02-12 21:21:31 +01:00
def find_pattern_in_email(address, pattern):
    # type: (Text, Text) -> Optional[Text]
    """Search the newest outgoing email sent to `address` for `pattern`,
    returning the full match, or None if no email was sent to it."""
    from django.core.mail import outbox
    compiled = re.compile(pattern)
    for message in reversed(outbox):
        if address not in message.to:
            continue
        return compiled.search(message.body).group(0)
    return None  # nocoverage -- in theory a test might want this case, but none do
2017-02-12 21:21:31 +01:00
2014-01-31 16:44:45 +01:00
def message_stream_count(user_profile):
    # type: (UserProfile) -> int
    """Count how many UserMessage rows this user has."""
    query = (UserMessage.objects
             .select_related("message")
             .filter(user_profile=user_profile))
    return query.count()
def most_recent_usermessage(user_profile):
    # type: (UserProfile) -> UserMessage
    """Fetch this user's UserMessage with the highest message id."""
    rows = (UserMessage.objects
            .select_related("message")
            .filter(user_profile=user_profile)
            .order_by('-message'))
    return rows[0]  # Django does LIMIT here
def most_recent_message(user_profile):
    # type: (UserProfile) -> Message
    """Fetch the newest Message delivered to this user."""
    return most_recent_usermessage(user_profile).message
2017-01-30 04:31:24 +01:00
def get_subscription(stream_name, user_profile):
    # type: (Text, UserProfile) -> Subscription
    """Look up the user's active subscription to the named stream."""
    stream = get_stream(stream_name, user_profile.realm)
    return Subscription.objects.get(
        user_profile=user_profile, active=True,
        recipient=get_recipient(Recipient.STREAM, stream.id))
2014-01-31 16:44:45 +01:00
def get_user_messages(user_profile):
    # type: (UserProfile) -> List[Message]
    """All Messages delivered to this user, oldest first."""
    rows = (UserMessage.objects
            .select_related("message")
            .filter(user_profile=user_profile)
            .order_by('message'))
    return [row.message for row in rows]
class DummyHandler(object):
    """Minimal stand-in for a Tornado request handler.

    All it does is register itself via allocate_handler_id(), which is
    enough for code that only needs a handler object with an id.
    """
    def __init__(self):
        # type: () -> None
        allocate_handler_id(self)  # type: ignore # this is a testing mock
2014-01-31 16:44:45 +01:00
class POSTRequestMock(object):
    """Bare-bones imitation of a Django POST HttpRequest, carrying just
    the attributes the views under test actually read."""
    method = "POST"

    def __init__(self, post_data, user_profile):
        # type: (Dict[str, Any], UserProfile) -> None
        self.user = user_profile
        self.POST = post_data
        self.GET = {}  # type: Dict[str, Any]
        # Middleware-ish bookkeeping that request-handling code expects.
        self.META = dict(PATH_INFO='test')
        self._log_data = {}  # type: Dict[str, Any]
        self._tornado_handler = DummyHandler()
2014-01-27 23:43:02 +01:00
2016-09-28 06:06:21 +02:00
class HostRequestMock(object):
    """A mock request object where get_host() works.  Useful for testing
    routes that use Zulip's subdomains feature"""

    def __init__(self, host=settings.EXTERNAL_HOST):
        # type: (Text) -> None
        # Defaults to the server's configured external hostname.
        self.host = host

    def get_host(self):
        # type: () -> Text
        return self.host
2016-12-13 04:20:33 +01:00
class MockPythonResponse(object):
    """Imitation of a `requests` response exposing only .text,
    .status_code and .ok."""

    def __init__(self, text, status_code):
        # type: (Text, int) -> None
        self.text = text
        self.status_code = status_code

    @property
    def ok(self):
        # type: () -> bool
        # Only a plain 200 counts as success here.
        return 200 == self.status_code
2016-07-28 01:40:28 +02:00
# Whether to record every test-client URL fetch for the coverage report.
INSTRUMENTING = os.environ.get('TEST_INSTRUMENT_URL_COVERAGE', '') == 'TRUE'
# One dict per instrumented call; consumed by write_instrumentation_reports().
INSTRUMENTED_CALLS = []  # type: List[Dict[str, Any]]

UrlFuncT = Callable[..., HttpResponse]  # TODO: make more specific

def append_instrumentation_data(data):
    # type: (Dict[str, Any]) -> None
    """Record one instrumented URL call for the coverage report."""
    INSTRUMENTED_CALLS.append(data)
2016-07-28 01:40:28 +02:00
def instrument_url(f):
    # type: (UrlFuncT) -> UrlFuncT
    """Decorator for test-client URL helpers (client_get etc.).

    When TEST_INSTRUMENT_URL_COVERAGE is enabled, wraps `f` to record
    each call's url, status code, timing and arguments via
    append_instrumentation_data(); otherwise returns `f` unchanged.
    """
    if not INSTRUMENTING:  # nocoverage -- option is always enabled; should we remove?
        return f
    else:
        def wrapper(self, url, info=None, **kwargs):
            # type: (Any, Text, Optional[Dict[str, Any]], **Any) -> HttpResponse
            # Previously `info={}` (a shared mutable default); use a None
            # sentinel and pass a fresh empty dict through instead.
            if info is None:
                info = {}
            start = time.time()
            result = f(self, url, info, **kwargs)
            delay = time.time() - start
            test_name = self.id()
            if '?' in url:
                url, extra_info = url.split('?', 1)
            else:
                extra_info = ''

            append_instrumentation_data(dict(
                url=url,
                status_code=result.status_code,
                method=f.__name__,
                delay=delay,
                extra_info=extra_info,
                info=info,
                test_name=test_name,
                kwargs=kwargs))
            return result
        return wrapper
2016-11-19 01:28:28 +01:00
def write_instrumentation_reports(full_suite):
    # type: (bool) -> None
    """Write the URL coverage report and, on a full suite run, fail the
    test run if any non-exempt URL pattern was never exercised.

    Only does anything when INSTRUMENTING is on.  Walks every URL
    pattern in zproject.urls, counts instrumented calls that matched it
    with a success-ish status code, dumps all recorded calls to
    var/url_coverage.txt, and exits nonzero on untested patterns.
    """
    if INSTRUMENTING:
        calls = INSTRUMENTED_CALLS

        from zproject.urls import urlpatterns, v1_api_and_json_patterns

        # Find our untested urls.
        pattern_cnt = collections.defaultdict(int)  # type: Dict[str, int]

        def re_strip(r):
            # type: (Any) -> str
            # Drop the regex anchors so a pattern can be glued to a prefix.
            return str(r).lstrip('^').rstrip('$')

        def find_patterns(patterns, prefixes):
            # type: (List[Any], List[str]) -> None
            for pattern in patterns:
                find_pattern(pattern, prefixes)

        def cleanup_url(url):
            # type: (str) -> str
            # Normalize recorded URLs to be server-relative, stripping
            # the test-server host variants the Django client produces.
            if url.startswith('/'):
                url = url[1:]
            if url.startswith('http://testserver/'):
                url = url[len('http://testserver/'):]
            if url.startswith('http://zulip.testserver/'):
                url = url[len('http://zulip.testserver/'):]
            if url.startswith('http://testserver:9080/'):
                url = url[len('http://testserver:9080/'):]
            return url

        def find_pattern(pattern, prefixes):
            # type: (Any, List[str]) -> None
            # Tally how many recorded calls hit this URL pattern.
            if isinstance(pattern, type(LocaleRegexURLResolver)):
                return  # nocoverage -- shouldn't actually happen

            if hasattr(pattern, 'url_patterns'):
                # A resolver wrapping sub-patterns, not a leaf URL.
                return

            canon_pattern = prefixes[0] + re_strip(pattern.regex.pattern)
            cnt = 0
            for call in calls:
                if 'pattern' in call:
                    # Already attributed to an earlier pattern.
                    continue
                url = cleanup_url(call['url'])
                for prefix in prefixes:
                    if url.startswith(prefix):
                        match_url = url[len(prefix):]
                        if pattern.regex.match(match_url):
                            if call['status_code'] in [200, 204, 301, 302]:
                                cnt += 1
                            call['pattern'] = canon_pattern
            pattern_cnt[canon_pattern] += cnt

        find_patterns(urlpatterns, ['', 'en/', 'de/'])
        find_patterns(v1_api_and_json_patterns, ['api/v1/', 'json/'])

        # Sanity-check that we actually saw the project's URL table.
        assert len(pattern_cnt) > 100

        untested_patterns = set([p for p in pattern_cnt if pattern_cnt[p] == 0])

        exempt_patterns = set([
            # We exempt some patterns that are called via Tornado.
            'api/v1/events',
            'api/v1/register',
            # We also exempt some development environment debugging
            # static content URLs, since the content they point to may
            # or may not exist.
            'coverage/(?P<path>.*)',
            'docs/(?P<path>.*)',
        ])

        untested_patterns -= exempt_patterns

        var_dir = 'var'  # TODO make sure path is robust here
        fn = os.path.join(var_dir, 'url_coverage.txt')
        with open(fn, 'w') as f:
            for call in calls:
                try:
                    line = ujson.dumps(call)
                    f.write(line + '\n')
                except OverflowError:  # nocoverage -- test suite error handling
                    print('''
                        A JSON overflow error was encountered while
                        producing the URL coverage report.  Sometimes
                        this indicates that a test is passing objects
                        into methods like client_post(), which is
                        unnecessary and leads to false positives.
                        ''')
                    print(call)

        if full_suite:
            print('INFO: URL coverage report is in %s' % (fn,))
            print('INFO: Try running: ./tools/create-test-api-docs')

        if full_suite and len(untested_patterns):  # nocoverage -- test suite error handling
            print("\nERROR: Some URLs are untested!  Here's the list of untested URLs:")
            for untested_pattern in sorted(untested_patterns):
                print("   %s" % (untested_pattern,))
            sys.exit(1)
2016-07-28 02:40:04 +02:00
2016-05-20 14:53:47 +02:00
def get_all_templates():
    # type: () -> List[str]
    """Collect the relative paths of every template visible to Django's
    configured template engines, skipping webhook fixtures, hidden
    files, __init__ placeholders and markdown sources."""
    templates = []  # type: List[str]

    def is_valid_template(p, n):
        # type: (Text, Text) -> bool
        # p is the full path, n the bare file name.
        if 'webhooks' in p:
            return False
        if n.startswith('.') or n.startswith('__init__'):
            return False
        if n.endswith('.md'):
            return False
        return os.path.isfile(p)

    def process(template_dir, dirname, fnames):
        # type: (str, str, Iterable[str]) -> None
        for name in fnames:
            path = os.path.join(dirname, name)
            if is_valid_template(path, name):
                templates.append(os.path.relpath(path, template_dir))

    for engine in loader.engines.all():
        for template_dir in engine.template_dirs:
            if not os.path.exists(template_dir):
                continue
            template_dir = os.path.normpath(template_dir)
            for dirpath, dirnames, fnames in os.walk(template_dir):
                process(template_dir, dirpath, fnames)

    return templates
2017-04-19 10:04:23 +02:00
def unsign_subdomain_cookie(result):
    # type: (HttpResponse) -> Dict[str, Any]
    """Decode the signed subdomain-auth cookie set on `result`,
    returning its JSON payload as a dict."""
    key = 'subdomain.signature'
    salt = key + 'zerver.views.auth'
    cookie = result.cookies.get(key)
    signer = signing.get_cookie_signer(salt=salt)
    return ujson.loads(signer.unsign(cookie.value, max_age=15))