2016-05-18 20:35:35 +02:00
from contextlib import contextmanager
2017-10-28 22:52:40 +02:00
from typing import (
2019-02-02 23:53:55 +01:00
Any , Callable , Dict , Generator , Iterable , Iterator , List , Mapping ,
2019-07-30 20:58:48 +02:00
Optional , Tuple , Union , IO , TypeVar , TYPE_CHECKING
2017-10-28 22:52:40 +02:00
)
2016-01-25 20:38:44 +01:00
2018-02-02 05:43:18 +01:00
from django . urls import URLResolver
2016-09-23 04:23:48 +02:00
from django . conf import settings
2019-02-02 23:53:55 +01:00
from django . test import override_settings
2017-10-27 02:45:38 +02:00
from django . http import HttpResponse , HttpResponseRedirect
2018-04-25 00:37:55 +02:00
from django . db . migrations . state import StateApps
2019-02-02 23:53:55 +01:00
from boto . s3 . connection import S3Connection
2018-12-07 18:15:51 +01:00
from boto . s3 . bucket import Bucket
2014-01-27 23:43:02 +01:00
2017-10-28 22:52:40 +02:00
import zerver . lib . upload
2020-03-12 14:17:25 +01:00
from zerver . lib . actions import do_set_realm_property
2017-10-28 22:52:40 +02:00
from zerver . lib . upload import S3UploadBackend , LocalUploadBackend
2016-12-19 08:48:03 +01:00
from zerver . lib . avatar import avatar_url
2017-02-13 09:19:52 +01:00
from zerver . lib . cache import get_cache_backend
2014-01-27 22:53:36 +01:00
from zerver . lib . db import TimeTrackingCursor
from zerver . lib import cache
2016-11-27 06:56:06 +01:00
from zerver . tornado import event_queue
2016-11-27 06:36:06 +01:00
from zerver . tornado . handlers import allocate_handler_id
2014-01-27 22:53:36 +01:00
from zerver . worker import queue_processors
2019-01-11 01:26:11 +01:00
from zerver . lib . integrations import WEBHOOK_INTEGRATIONS
2020-01-23 12:21:55 +01:00
from zerver . views . auth import get_login_data
2014-01-27 22:53:36 +01:00
2014-01-27 23:43:02 +01:00
from zerver . models import (
2020-03-12 14:17:25 +01:00
get_realm ,
2016-01-24 02:29:35 +01:00
get_stream ,
2014-01-27 23:43:02 +01:00
Client ,
Message ,
2020-03-12 14:17:25 +01:00
Realm ,
2014-01-27 23:43:02 +01:00
Subscription ,
UserMessage ,
2016-06-03 02:10:13 +02:00
UserProfile ,
2014-01-27 23:43:02 +01:00
)
2019-07-30 20:58:48 +02:00
if TYPE_CHECKING :
2018-12-17 20:14:47 +01:00
# Avoid an import cycle; we only need these for type annotations.
2018-04-25 00:37:55 +02:00
from zerver . lib . test_classes import ZulipTestCase , MigrationsTestCase
2017-12-08 16:59:13 +01:00
2016-11-24 19:45:40 +01:00
import collections
2016-09-15 22:05:56 +02:00
import mock
2014-01-27 22:53:36 +01:00
import os
import re
2016-11-19 01:28:28 +01:00
import sys
2014-01-27 22:53:36 +01:00
import time
import ujson
2017-10-28 22:52:40 +02:00
from moto import mock_s3_deprecated
2014-01-27 22:53:36 +01:00
2016-12-13 10:59:54 +01:00
import fakeldap
import ldap
class MockLDAP(fakeldap.MockLDAP):
    # A fakeldap.MockLDAP that also re-exposes the `ldap` module's exception
    # classes, so test code can catch them off this mock object exactly the
    # way production code catches them off the real `ldap` module.
    class LDAPError(ldap.LDAPError):
        pass

    class INVALID_CREDENTIALS(ldap.INVALID_CREDENTIALS):
        pass

    class NO_SUCH_OBJECT(ldap.NO_SUCH_OBJECT):
        pass

    class ALREADY_EXISTS(ldap.ALREADY_EXISTS):
        pass
2014-01-27 22:53:36 +01:00
2017-07-27 06:31:26 +02:00
@contextmanager
def stub_event_queue_user_events(event_queue_return: Any,
                                 user_events_return: Any) -> Iterator[None]:
    """Patch the event-system entry points so that request_event_queue and
    get_user_events return the supplied canned values for the duration of
    the `with` block."""
    queue_patch = mock.patch('zerver.lib.events.request_event_queue',
                             return_value=event_queue_return)
    events_patch = mock.patch('zerver.lib.events.get_user_events',
                              return_value=user_events_return)
    with queue_patch, events_patch:
        yield
2014-01-27 22:53:36 +01:00
@contextmanager
def simulated_queue_client(client: Callable[..., Any]) -> Iterator[None]:
    """Swap queue_processors.SimpleQueueClient for the given fake client
    class while the `with` block runs.

    The original class is restored in a `finally` clause, so an exception
    inside the block cannot leak the patched state into later tests (the
    original code skipped the restore on exception).
    """
    real_SimpleQueueClient = queue_processors.SimpleQueueClient
    queue_processors.SimpleQueueClient = client  # type: ignore # https://github.com/JukkaL/mypy/issues/1152
    try:
        yield
    finally:
        queue_processors.SimpleQueueClient = real_SimpleQueueClient  # type: ignore # https://github.com/JukkaL/mypy/issues/1152
2014-01-27 22:53:36 +01:00
@contextmanager
def tornado_redirected_to_list(lst: List[Mapping[str, Any]]) -> Iterator[None]:
    """Capture events that would be pushed to Tornado, appending each notice
    to `lst` instead of delivering it."""
    real_event_queue_process_notification = event_queue.process_notification
    # process_notification takes a single parameter called 'notice'.
    # lst.append takes a single argument called 'object'.
    # Some code might call process_notification using keyword arguments,
    # so mypy doesn't allow assigning lst.append to process_notification
    # So explicitly change parameter name to 'notice' to work around this problem
    event_queue.process_notification = lambda notice: lst.append(notice)
    try:
        yield
    finally:
        # Restore even when the block raises, so a failing test doesn't
        # leave the event queue patched for subsequent tests.
        event_queue.process_notification = real_event_queue_process_notification
2014-01-27 22:53:36 +01:00
2018-12-17 22:38:21 +01:00
class EventInfo:
    """Holds the arguments of the single send_event(...) call captured by
    capture_event(): the realm id, the event payload, and the target user ids."""

    def populate(self, call_args_list: List[Any]) -> None:
        # call_args_list[0] is the first recorded call; its [0] element is
        # the positional-argument tuple (realm, event payload, user ids).
        positional_args = call_args_list[0][0]
        self.realm_id = positional_args[0]
        self.payload = positional_args[1]
        self.user_ids = positional_args[2]
@contextmanager
def capture_event(event_info: EventInfo) -> Iterator[None]:
    # Use this for simple endpoints that throw a single event
    # in zerver.lib.actions.
    with mock.patch('zerver.lib.actions.send_event') as m:
        yield

    call_count = len(m.call_args_list)
    if call_count == 0:
        raise AssertionError('No event was sent inside actions.py')
    if call_count > 1:
        raise AssertionError('Too many events sent by action')

    event_info.populate(m.call_args_list)
2014-01-27 22:53:36 +01:00
@contextmanager
def simulated_empty_cache() -> Generator[
        List[Tuple[str, Union[str, List[str]], str]], None, None]:
    """Patch the cache layer so that every lookup misses, and yield the list
    of (operation, key(s), cache_name) tuples that were attempted.

    The real cache functions are restored in a `finally` clause so that an
    exception in the block cannot leave the cache layer stubbed out.
    """
    cache_queries = []  # type: List[Tuple[str, Union[str, List[str]], str]]

    def my_cache_get(key: str, cache_name: Optional[str]=None) -> Optional[Dict[str, Any]]:
        cache_queries.append(('get', key, cache_name))
        return None

    def my_cache_get_many(keys: List[str], cache_name: Optional[str]=None) -> Dict[str, Any]:  # nocoverage -- simulated code doesn't use this
        cache_queries.append(('getmany', keys, cache_name))
        return {}

    old_get = cache.cache_get
    old_get_many = cache.cache_get_many
    cache.cache_get = my_cache_get
    cache.cache_get_many = my_cache_get_many
    try:
        yield cache_queries
    finally:
        cache.cache_get = old_get
        cache.cache_get_many = old_get_many
@contextmanager
def queries_captured(include_savepoints: bool=False) -> Generator[
        List[Dict[str, Union[str, bytes]]], None, None]:
    '''
    Allow a user to capture just the queries executed during
    the with statement.
    '''
    # NOTE: `include_savepoints` was annotated Optional[bool], but None is
    # never passed; a plain bool is the honest type.
    queries = []  # type: List[Dict[str, Union[str, bytes]]]

    def wrapper_execute(self: TimeTrackingCursor,
                        action: Callable[[str, Iterable[Any]], None],
                        sql: str,
                        params: Iterable[Any]=()) -> None:
        # Clear the cache before each query so the captured query list
        # reflects a cold cache rather than earlier activity in the test.
        cache = get_cache_backend(None)
        cache.clear()
        start = time.time()
        try:
            return action(sql, params)
        finally:
            stop = time.time()
            duration = stop - start
            if include_savepoints or ('SAVEPOINT' not in sql):
                queries.append({
                    'sql': self.mogrify(sql, params).decode('utf-8'),
                    'time': "%.3f" % (duration,),
                })

    old_execute = TimeTrackingCursor.execute
    old_executemany = TimeTrackingCursor.executemany

    def cursor_execute(self: TimeTrackingCursor, sql: str,
                       params: Iterable[Any]=()) -> None:
        return wrapper_execute(self, super(TimeTrackingCursor, self).execute, sql, params)  # type: ignore # https://github.com/JukkaL/mypy/issues/1167
    TimeTrackingCursor.execute = cursor_execute  # type: ignore # https://github.com/JukkaL/mypy/issues/1167

    def cursor_executemany(self: TimeTrackingCursor, sql: str,
                           params: Iterable[Any]=()) -> None:
        return wrapper_execute(self, super(TimeTrackingCursor, self).executemany, sql, params)  # type: ignore # https://github.com/JukkaL/mypy/issues/1167 # nocoverage -- doesn't actually get used in tests
    TimeTrackingCursor.executemany = cursor_executemany  # type: ignore # https://github.com/JukkaL/mypy/issues/1167

    # Restore the real cursor methods even if the block raises, so a failing
    # test cannot leave the cursor instrumented for the rest of the suite.
    try:
        yield queries
    finally:
        TimeTrackingCursor.execute = old_execute  # type: ignore # https://github.com/JukkaL/mypy/issues/1167
        TimeTrackingCursor.executemany = old_executemany  # type: ignore # https://github.com/JukkaL/mypy/issues/1167
2014-01-27 22:53:36 +01:00
2017-02-09 22:58:43 +01:00
@contextmanager
def stdout_suppressed() -> Iterator[IO[str]]:
    """Redirect stdout to /dev/null."""
    with open(os.devnull, 'a') as devnull:
        stdout, sys.stdout = sys.stdout, devnull
        try:
            yield stdout
        finally:
            # Restore even if the body raises, so a failing test doesn't
            # leave sys.stdout pointing at /dev/null.
            sys.stdout = stdout
2020-03-12 14:17:25 +01:00
def reset_emails_in_zulip_realm ( ) - > None :
realm = get_realm ( ' zulip ' )
do_set_realm_property ( realm , ' email_address_visibility ' ,
Realm . EMAIL_ADDRESS_VISIBILITY_EVERYONE )
def get_test_image_file(filename: str) -> IO[Any]:
    """Open (in binary mode) one of the image fixtures under zerver/tests/images."""
    images_dir = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '../tests/images'))
    return open(os.path.join(images_dir, filename), 'rb')
def avatar_disk_path(user_profile: UserProfile, medium: bool=False, original: bool=False) -> str:
    """Return the local-disk path where the user's avatar is stored, derived
    from the last two components of its avatar URL (query string stripped).

    With original=True, return the path of the pre-thumbnailing upload,
    which lives next to the .png with a .original extension.
    """
    url_path = avatar_url(user_profile, medium)
    path_pieces = url_path.split("/")
    disk_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars",
                             path_pieces[-2],
                             path_pieces[-1].split("?")[0])
    if original:
        return disk_path.replace(".png", ".original")
    return disk_path
2014-01-27 23:43:02 +01:00
def make_client(name: str) -> Client:
    """Fetch (creating lazily if needed) the Client row with the given name."""
    return Client.objects.get_or_create(name=name)[0]
def find_key_by_email(address: str) -> Optional[str]:
    """Return the confirmation key from the most recent email sent to
    `address`, or None if no email to that address is in the outbox.

    Binds the regex match explicitly: the old `search(...).groups()[0]`
    would surface a non-matching body as a confusing AttributeError on
    None; the assert makes that failure mode explicit.
    """
    from django.core.mail import outbox
    key_regex = re.compile("accounts/do_confirm/([a-z0-9]{24})>")
    for message in reversed(outbox):
        if address in message.to:
            match = key_regex.search(message.body)
            assert match is not None  # confirmation emails always contain a key
            return match.group(1)
    return None  # nocoverage -- in theory a test might want this case, but none do
2014-01-27 23:43:02 +01:00
def message_stream_count(user_profile: UserProfile) -> int:
    """Number of UserMessage rows belonging to this user."""
    query = UserMessage.objects.select_related("message")
    return query.filter(user_profile=user_profile).count()
def most_recent_usermessage(user_profile: UserProfile) -> UserMessage:
    """The newest UserMessage row for this user (by descending message id)."""
    rows = (UserMessage.objects
            .select_related("message")
            .filter(user_profile=user_profile)
            .order_by('-message'))
    return rows[0]  # Django does LIMIT here
2014-01-31 16:44:45 +01:00
2017-11-05 11:15:10 +01:00
def most_recent_message ( user_profile : UserProfile ) - > Message :
2014-01-31 16:44:45 +01:00
usermessage = most_recent_usermessage ( user_profile )
return usermessage . message
def get_subscription(stream_name: str, user_profile: UserProfile) -> Subscription:
    """The user's active Subscription to the named stream in their realm."""
    stream = get_stream(stream_name, user_profile.realm)
    return Subscription.objects.get(user_profile=user_profile,
                                    recipient_id=stream.recipient_id,
                                    active=True)
2017-01-30 04:31:24 +01:00
def get_user_messages(user_profile: UserProfile) -> List[Message]:
    """All messages this user received, oldest first."""
    rows = (UserMessage.objects
            .select_related("message")
            .filter(user_profile=user_profile)
            .order_by('message'))
    return [row.message for row in rows]
class DummyHandler:
    # Stand-in for a Tornado request handler; registering it with
    # allocate_handler_id is all the event-system code under test needs.
    def __init__(self) -> None:
        allocate_handler_id(self)  # type: ignore # this is a testing mock
2014-01-31 16:44:45 +01:00
class POSTRequestMock:
    """Minimal stand-in for a Django POST HttpRequest, carrying just the
    attributes the view code under test reads."""
    method = "POST"

    def __init__(self, post_data: Dict[str, Any], user_profile: Optional[UserProfile]) -> None:
        self.GET = {}  # type: Dict[str, Any]

        # Convert any integer parameters passed into strings, even
        # though of course the HTTP API would do so.  Ideally, we'd
        # get rid of this abstraction entirely and just use the HTTP
        # API directly, but while it exists, we need this code.
        self.POST = {}  # type: Dict[str, str]
        for key, value in post_data.items():
            self.POST[key] = str(value)

        self.user = user_profile
        self._tornado_handler = DummyHandler()
        self._log_data = {}  # type: Dict[str, Any]
        self.META = {'PATH_INFO': 'test'}
        self.path = ''
2014-01-27 23:43:02 +01:00
class HostRequestMock:
    """A mock request object where get_host() works.  Useful for testing
    routes that use Zulip's subdomains feature"""

    # user_profile defaults to None, so the annotation is Optional.
    # NOTE(review): `host` defaults to settings.EXTERNAL_HOST, evaluated once
    # at class-definition time -- fine for tests, but confirm if settings are
    # ever overridden before this module is imported.
    def __init__(self, user_profile: Optional[UserProfile]=None,
                 host: str=settings.EXTERNAL_HOST) -> None:
        self.host = host
        self.GET = {}  # type: Dict[str, Any]
        self.POST = {}  # type: Dict[str, Any]
        self.META = {'PATH_INFO': 'test'}
        self.path = ''
        self.user = user_profile
        self.method = ''
        self.body = ''
        self.content_type = ''

    def get_host(self) -> str:
        # Mirrors django.http.HttpRequest.get_host for subdomain routing tests.
        return self.host
class MockPythonResponse:
    """Lightweight stand-in for a `requests` response object: exposes text,
    status_code, headers, an `ok` property, and a one-shot iter_content()."""

    def __init__(self, text: str, status_code: int,
                 headers: Optional[Dict[str, str]]=None) -> None:
        self.text = text
        self.status_code = status_code
        # Default to an HTML content type, as real fetched pages would have.
        self.headers = {'content-type': 'text/html'} if headers is None else headers

    @property
    def ok(self) -> bool:
        return self.status_code == 200

    def iter_content(self, n: int) -> Generator[str, Any, None]:
        # Yields just the first n characters -- enough for the code under test.
        yield self.text[:n]
2016-07-28 01:40:28 +02:00
# Set TEST_INSTRUMENT_URL_COVERAGE=TRUE in the environment to record per-URL
# call data for write_instrumentation_reports() below.
INSTRUMENTING = os.environ.get('TEST_INSTRUMENT_URL_COVERAGE', '') == 'TRUE'
# One dict per instrumented request; appended by append_instrumentation_data().
INSTRUMENTED_CALLS = []  # type: List[Dict[str, Any]]

# Signature shared by the instrumented ZulipTestCase URL helpers.
UrlFuncT = Callable[..., HttpResponse]  # TODO: make more specific
2016-09-12 03:06:25 +02:00
def append_instrumentation_data(data: Dict[str, Any]) -> None:
    """Record one instrumented URL call for the URL coverage report."""
    INSTRUMENTED_CALLS.append(data)
def instrument_url(f: UrlFuncT) -> UrlFuncT:
    """Decorator for ZulipTestCase URL helpers (client_get, etc.): records
    each call's URL, status code, timing, and arguments in INSTRUMENTED_CALLS
    so write_instrumentation_reports() can compute URL coverage.

    Returns `f` unchanged when instrumentation is disabled.
    """
    if not INSTRUMENTING:  # nocoverage -- option is always enabled; should we remove?
        return f
    else:
        # NOTE(review): `info` has a mutable default ({}); it is only read and
        # passed through here, so this is safe as long as callees don't mutate
        # it -- confirm before refactoring.
        def wrapper(self: 'ZulipTestCase', url: str, info: Dict[str, Any]={},
                    **kwargs: Any) -> HttpResponse:
            start = time.time()
            result = f(self, url, info, **kwargs)
            delay = time.time() - start
            test_name = self.id()
            # Split off the query string so coverage is keyed on the bare path.
            if '?' in url:
                url, extra_info = url.split('?', 1)
            else:
                extra_info = ''

            append_instrumentation_data(dict(
                url=url,
                status_code=result.status_code,
                method=f.__name__,
                delay=delay,
                extra_info=extra_info,
                info=info,
                test_name=test_name,
                kwargs=kwargs))
            return result
        return wrapper
def write_instrumentation_reports(full_suite: bool, include_webhooks: bool) -> None:
    """Write var/url_coverage.txt from INSTRUMENTED_CALLS and, when running
    the full suite, fail loudly if any non-exempt URL pattern went untested.

    No-op unless INSTRUMENTING is on.  `include_webhooks` controls whether
    webhook URLs count toward required coverage.
    """
    if INSTRUMENTING:
        calls = INSTRUMENTED_CALLS

        from zproject.urls import urlpatterns, v1_api_and_json_patterns

        # Find our untested urls.
        pattern_cnt = collections.defaultdict(int)  # type: Dict[str, int]

        def re_strip(r: Any) -> str:
            # Drop the regex anchors so patterns can be joined with prefixes.
            return str(r).lstrip('^').rstrip('$')

        def find_patterns(patterns: List[Any], prefixes: List[str]) -> None:
            for pattern in patterns:
                find_pattern(pattern, prefixes)

        def cleanup_url(url: str) -> str:
            # Normalize recorded URLs to relative, scheme-less paths.
            if url.startswith('/'):
                url = url[1:]
            if url.startswith('http://testserver/'):
                url = url[len('http://testserver/'):]
            if url.startswith('http://zulip.testserver/'):
                url = url[len('http://zulip.testserver/'):]
            if url.startswith('http://testserver:9080/'):
                url = url[len('http://testserver:9080/'):]
            return url

        def find_pattern(pattern: Any, prefixes: List[str]) -> None:
            # Count the instrumented calls that resolved to this URL pattern.
            if isinstance(pattern, type(URLResolver)):
                return  # nocoverage -- shouldn't actually happen

            if hasattr(pattern, 'url_patterns'):
                return

            canon_pattern = prefixes[0] + re_strip(pattern.pattern.regex.pattern)
            cnt = 0
            for call in calls:
                if 'pattern' in call:
                    continue
                url = cleanup_url(call['url'])
                for prefix in prefixes:
                    if url.startswith(prefix):
                        match_url = url[len(prefix):]
                        if pattern.resolve(match_url):
                            # Only successful-ish responses count as coverage.
                            if call['status_code'] in [200, 204, 301, 302]:
                                cnt += 1
                            call['pattern'] = canon_pattern
            pattern_cnt[canon_pattern] += cnt

        find_patterns(urlpatterns, ['', 'en/', 'de/'])
        find_patterns(v1_api_and_json_patterns, ['api/v1/', 'json/'])

        # Sanity check that we actually walked the URL table.
        assert len(pattern_cnt) > 100
        untested_patterns = set([p for p in pattern_cnt if pattern_cnt[p] == 0])

        exempt_patterns = set([
            # We exempt some patterns that are called via Tornado.
            'api/v1/events',
            'api/v1/events/internal',
            'api/v1/register',
            # We also exempt some development environment debugging
            # static content URLs, since the content they point to may
            # or may not exist.
            'coverage/(?P<path>.*)',
            'node-coverage/(?P<path>.*)',
            'docs/(?P<path>.*)',
            'casper/(?P<path>.*)',
            'static/(?P<path>.*)',
        ] + [webhook.url for webhook in WEBHOOK_INTEGRATIONS if not include_webhooks])

        untested_patterns -= exempt_patterns

        var_dir = 'var'  # TODO make sure path is robust here
        fn = os.path.join(var_dir, 'url_coverage.txt')
        with open(fn, 'w') as f:
            for call in calls:
                try:
                    line = ujson.dumps(call)
                    f.write(line + '\n')
                except OverflowError:  # nocoverage -- test suite error handling
                    print('''
                    A JSON overflow error was encountered while
                    producing the URL coverage report.  Sometimes
                    this indicates that a test is passing objects
                    into methods like client_post(), which is
                    unnecessary and leads to false positives.
                    ''')
                    print(call)

        if full_suite:
            print('INFO: URL coverage report is in %s' % (fn,))
            print('INFO: Try running: ./tools/create-test-api-docs')

        if full_suite and len(untested_patterns):  # nocoverage -- test suite error handling
            print("\nERROR: Some URLs are untested!  Here's the list of untested URLs:")
            for untested_pattern in sorted(untested_patterns):
                print("   %s" % (untested_pattern,))
            sys.exit(1)
2016-07-28 02:40:04 +02:00
def load_subdomain_token(response: HttpResponse) -> Dict[str, Any]:
    """Pull the log-into-subdomain token off the end of a redirect URL and
    decode it into the login data dict (without consuming the token)."""
    assert isinstance(response, HttpResponseRedirect)
    _, token = response.url.rsplit('/', 1)
    data = get_login_data(token, should_delete=False)
    assert data is not None
    return data
# Type of a test method that use_s3_backend() below can wrap.
FuncT = TypeVar('FuncT', bound=Callable[..., None])
def use_s3_backend(method: FuncT) -> FuncT:
    """Decorator: run the test against a moto-mocked S3 upload backend.

    Swaps the global upload backend to S3 for the duration of the test and
    restores the local backend afterwards, even on failure.  The decorator
    stacking order matters: moto's S3 mock must be outermost.
    """
    @mock_s3_deprecated
    @override_settings(LOCAL_UPLOADS_DIR=None)
    def new_method(*args: Any, **kwargs: Any) -> Any:
        zerver.lib.upload.upload_backend = S3UploadBackend()
        try:
            return method(*args, **kwargs)
        finally:
            zerver.lib.upload.upload_backend = LocalUploadBackend()
    return new_method
def create_s3_buckets(*bucket_names: str) -> List[Bucket]:
    """Create the named buckets (under moto's mocked S3) and return them.

    Note: the annotation on a *args parameter is the per-element type, so
    this is `str`, not `Tuple[str]` as previously written.
    """
    conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
    buckets = [conn.create_bucket(name) for name in bucket_names]
    return buckets
def use_db_models(method: Callable[..., None]) -> Callable[..., None]:  # nocoverage
    """Decorator for MigrationsTestCase methods: patch zerver.models (and the
    model names imported into the test helper/class modules) with the
    historical model classes from the migration's StateApps, so the test body
    operates on the schema as of that migration rather than the current one.
    """
    def method_patched_with_mock(self: 'MigrationsTestCase', apps: StateApps) -> None:
        # Look up every model from the migration state by name.
        ArchivedAttachment = apps.get_model('zerver', 'ArchivedAttachment')
        ArchivedMessage = apps.get_model('zerver', 'ArchivedMessage')
        ArchivedUserMessage = apps.get_model('zerver', 'ArchivedUserMessage')
        Attachment = apps.get_model('zerver', 'Attachment')
        BotConfigData = apps.get_model('zerver', 'BotConfigData')
        BotStorageData = apps.get_model('zerver', 'BotStorageData')
        Client = apps.get_model('zerver', 'Client')
        CustomProfileField = apps.get_model('zerver', 'CustomProfileField')
        CustomProfileFieldValue = apps.get_model('zerver', 'CustomProfileFieldValue')
        DefaultStream = apps.get_model('zerver', 'DefaultStream')
        DefaultStreamGroup = apps.get_model('zerver', 'DefaultStreamGroup')
        EmailChangeStatus = apps.get_model('zerver', 'EmailChangeStatus')
        Huddle = apps.get_model('zerver', 'Huddle')
        Message = apps.get_model('zerver', 'Message')
        MultiuseInvite = apps.get_model('zerver', 'MultiuseInvite')
        MutedTopic = apps.get_model('zerver', 'MutedTopic')
        PreregistrationUser = apps.get_model('zerver', 'PreregistrationUser')
        PushDeviceToken = apps.get_model('zerver', 'PushDeviceToken')
        Reaction = apps.get_model('zerver', 'Reaction')
        Realm = apps.get_model('zerver', 'Realm')
        RealmAuditLog = apps.get_model('zerver', 'RealmAuditLog')
        RealmDomain = apps.get_model('zerver', 'RealmDomain')
        RealmEmoji = apps.get_model('zerver', 'RealmEmoji')
        RealmFilter = apps.get_model('zerver', 'RealmFilter')
        Recipient = apps.get_model('zerver', 'Recipient')
        # Historical models don't carry class attributes, so restore the
        # Recipient type constants by hand.
        Recipient.PERSONAL = 1
        Recipient.STREAM = 2
        Recipient.HUDDLE = 3
        ScheduledEmail = apps.get_model('zerver', 'ScheduledEmail')
        ScheduledMessage = apps.get_model('zerver', 'ScheduledMessage')
        Service = apps.get_model('zerver', 'Service')
        Stream = apps.get_model('zerver', 'Stream')
        Subscription = apps.get_model('zerver', 'Subscription')
        UserActivity = apps.get_model('zerver', 'UserActivity')
        UserActivityInterval = apps.get_model('zerver', 'UserActivityInterval')
        UserGroup = apps.get_model('zerver', 'UserGroup')
        UserGroupMembership = apps.get_model('zerver', 'UserGroupMembership')
        UserHotspot = apps.get_model('zerver', 'UserHotspot')
        UserMessage = apps.get_model('zerver', 'UserMessage')
        UserPresence = apps.get_model('zerver', 'UserPresence')
        UserProfile = apps.get_model('zerver', 'UserProfile')
        zerver_models_patch = mock.patch.multiple(
            'zerver.models',
            ArchivedAttachment=ArchivedAttachment,
            ArchivedMessage=ArchivedMessage,
            ArchivedUserMessage=ArchivedUserMessage,
            Attachment=Attachment,
            BotConfigData=BotConfigData,
            BotStorageData=BotStorageData,
            Client=Client,
            CustomProfileField=CustomProfileField,
            CustomProfileFieldValue=CustomProfileFieldValue,
            DefaultStream=DefaultStream,
            DefaultStreamGroup=DefaultStreamGroup,
            EmailChangeStatus=EmailChangeStatus,
            Huddle=Huddle,
            Message=Message,
            MultiuseInvite=MultiuseInvite,
            MutedTopic=MutedTopic,
            PreregistrationUser=PreregistrationUser,
            PushDeviceToken=PushDeviceToken,
            Reaction=Reaction,
            Realm=Realm,
            RealmAuditLog=RealmAuditLog,
            RealmDomain=RealmDomain,
            RealmEmoji=RealmEmoji,
            RealmFilter=RealmFilter,
            Recipient=Recipient,
            ScheduledEmail=ScheduledEmail,
            ScheduledMessage=ScheduledMessage,
            Service=Service,
            Stream=Stream,
            Subscription=Subscription,
            UserActivity=UserActivity,
            UserActivityInterval=UserActivityInterval,
            UserGroup=UserGroup,
            UserGroupMembership=UserGroupMembership,
            UserHotspot=UserHotspot,
            UserMessage=UserMessage,
            UserPresence=UserPresence,
            UserProfile=UserProfile
        )
        zerver_test_helpers_patch = mock.patch.multiple(
            'zerver.lib.test_helpers',
            Client=Client,
            Message=Message,
            Subscription=Subscription,
            UserMessage=UserMessage,
            UserProfile=UserProfile,
        )
        zerver_test_classes_patch = mock.patch.multiple(
            'zerver.lib.test_classes',
            Client=Client,
            Message=Message,
            Realm=Realm,
            Recipient=Recipient,
            Stream=Stream,
            Subscription=Subscription,
            UserProfile=UserProfile,
        )
        with zerver_models_patch, \
                zerver_test_helpers_patch, \
                zerver_test_classes_patch:
            method(self, apps)
    return method_patched_with_mock
def create_dummy_file(filename: str) -> str:
    """Write a tiny placeholder file into TEST_WORKER_DIR and return its path."""
    filepath = os.path.join(settings.TEST_WORKER_DIR, filename)
    with open(filepath, 'w') as dummy:
        dummy.write('zulip!')
    return filepath