2016-05-18 20:35:35 +02:00
from contextlib import contextmanager
2017-10-28 22:52:40 +02:00
from typing import (
cast , Any , Callable , Dict , Generator , Iterable , Iterator , List , Mapping ,
Optional , Set , Sized , Tuple , Union , IO , Text , TypeVar
)
2016-01-25 20:38:44 +01:00
2017-04-19 10:04:23 +02:00
from django . core import signing
2016-11-24 19:45:40 +01:00
from django . core . urlresolvers import LocaleRegexURLResolver
2016-09-23 04:23:48 +02:00
from django . conf import settings
2017-10-28 22:52:40 +02:00
from django . test import TestCase , override_settings
2016-07-14 00:21:01 +02:00
from django . test . client import (
BOUNDARY , MULTIPART_CONTENT , encode_multipart ,
)
2016-05-20 14:53:47 +02:00
from django . template import loader
2017-10-27 02:45:38 +02:00
from django . http import HttpResponse , HttpResponseRedirect
2016-10-21 22:13:23 +02:00
from django . db . utils import IntegrityError
2014-01-27 23:43:02 +01:00
2017-10-28 22:52:40 +02:00
import zerver . lib . upload
from zerver . lib . upload import S3UploadBackend , LocalUploadBackend
2016-12-19 08:48:03 +01:00
from zerver . lib . avatar import avatar_url
2017-02-13 09:19:52 +01:00
from zerver . lib . cache import get_cache_backend
2014-01-27 23:43:02 +01:00
from zerver . lib . initial_password import initial_password
2014-01-27 22:53:36 +01:00
from zerver . lib . db import TimeTrackingCursor
2016-06-16 01:23:52 +02:00
from zerver . lib . str_utils import force_text
2014-01-27 22:53:36 +01:00
from zerver . lib import cache
2016-11-27 06:56:06 +01:00
from zerver . tornado import event_queue
2016-11-27 06:36:06 +01:00
from zerver . tornado . handlers import allocate_handler_id
2014-01-27 22:53:36 +01:00
from zerver . worker import queue_processors
2014-01-27 23:43:02 +01:00
from zerver . lib . actions import (
2016-10-20 00:08:03 +02:00
check_send_message , create_stream_if_needed , bulk_add_subscriptions ,
2017-10-28 20:26:11 +02:00
get_display_recipient , bulk_remove_subscriptions , get_stream_recipient ,
2014-01-27 23:43:02 +01:00
)
from zerver . models import (
2017-01-30 04:31:24 +01:00
get_recipient ,
2016-01-24 02:29:35 +01:00
get_stream ,
2017-05-24 02:42:31 +02:00
get_user ,
2014-01-27 23:43:02 +01:00
Client ,
Message ,
Realm ,
Recipient ,
Stream ,
Subscription ,
UserMessage ,
2016-06-03 02:10:13 +02:00
UserProfile ,
2014-01-27 23:43:02 +01:00
)
2016-05-18 20:35:35 +02:00
from zerver . lib . request import JsonableError
2017-12-08 16:59:13 +01:00
if False :
from zerver . lib . test_case import ZulipTestCase
2016-11-24 19:45:40 +01:00
import collections
2014-01-27 22:53:36 +01:00
import base64
2016-09-15 22:05:56 +02:00
import mock
2014-01-27 22:53:36 +01:00
import os
import re
2016-11-19 01:28:28 +01:00
import sys
2014-01-27 22:53:36 +01:00
import time
import ujson
2016-07-12 06:35:50 +02:00
import unittest
2017-11-05 05:30:31 +01:00
import urllib
2016-06-28 07:10:38 +02:00
from zerver . lib . str_utils import NonBinaryStr
2017-10-28 22:52:40 +02:00
from moto import mock_s3_deprecated
2014-01-27 22:53:36 +01:00
from contextlib import contextmanager
2016-12-13 10:59:54 +01:00
import fakeldap
import ldap
class MockLDAP(fakeldap.MockLDAP):
    """fakeldap.MockLDAP extended with the `ldap` exception classes
    re-exported as nested classes, so tests can raise and catch them
    through the mock object itself."""

    class LDAPError(ldap.LDAPError):
        pass

    class INVALID_CREDENTIALS(ldap.INVALID_CREDENTIALS):
        pass

    class NO_SUCH_OBJECT(ldap.NO_SUCH_OBJECT):
        pass

    class ALREADY_EXISTS(ldap.ALREADY_EXISTS):
        pass
2014-01-27 22:53:36 +01:00
2017-07-27 06:31:26 +02:00
@contextmanager
def stub_event_queue_user_events(event_queue_return: Any,
                                 user_events_return: Any) -> Iterator[None]:
    """Patch zerver.lib.events so that request_event_queue and
    get_user_events return the supplied canned values."""
    queue_patch = mock.patch('zerver.lib.events.request_event_queue',
                             return_value=event_queue_return)
    events_patch = mock.patch('zerver.lib.events.get_user_events',
                              return_value=user_events_return)
    with queue_patch, events_patch:
        yield
2014-01-27 22:53:36 +01:00
@contextmanager
def simulated_queue_client(client: Callable[..., Any]) -> Iterator[None]:
    """Temporarily replace the queue processors' SimpleQueueClient with
    the given fake client, restoring the real one afterwards."""
    saved_client = queue_processors.SimpleQueueClient
    queue_processors.SimpleQueueClient = client  # type: ignore # https://github.com/JukkaL/mypy/issues/1152
    yield
    queue_processors.SimpleQueueClient = saved_client  # type: ignore # https://github.com/JukkaL/mypy/issues/1152
2014-01-27 22:53:36 +01:00
@contextmanager
def tornado_redirected_to_list(lst: List[Mapping[str, Any]]) -> Iterator[None]:
    """Divert Tornado event-queue notifications into `lst` instead of
    delivering them, for the duration of the with block."""
    saved_handler = event_queue.process_notification
    # We can't assign lst.append directly: process_notification's single
    # parameter is named 'notice' while list.append's is named 'object',
    # and some callers pass it by keyword, so mypy rejects the direct
    # assignment.  The lambda renames the parameter to 'notice'.
    event_queue.process_notification = lambda notice: lst.append(notice)
    yield
    event_queue.process_notification = saved_handler
2014-01-27 22:53:36 +01:00
@contextmanager
def simulated_empty_cache() -> Generator[
        List[Tuple[str, Union[Text, List[Text]], Text]], None, None]:
    """Make every cache lookup miss, yielding a list that records each
    lookup (operation name, key(s), cache name) as it happens."""
    lookup_log = []  # type: List[Tuple[str, Union[Text, List[Text]], Text]]

    def fake_cache_get(key: Text, cache_name: Optional[str]=None) -> Optional[Dict[Text, Any]]:
        lookup_log.append(('get', key, cache_name))
        return None

    def fake_cache_get_many(keys, cache_name=None):  # nocoverage -- simulated code doesn't use this
        # type: (List[Text], Optional[str]) -> Dict[Text, Any]
        lookup_log.append(('getmany', keys, cache_name))
        return {}

    saved_get, saved_get_many = cache.cache_get, cache.cache_get_many
    cache.cache_get = fake_cache_get
    cache.cache_get_many = fake_cache_get_many
    yield lookup_log
    cache.cache_get = saved_get
    cache.cache_get_many = saved_get_many
@contextmanager
def queries_captured(include_savepoints: Optional[bool]=False) -> Generator[
        List[Dict[str, Union[str, bytes]]], None, None]:
    '''
    Allow a user to capture just the queries executed during
    the with statement.

    Yields a list of dicts with 'sql' (the mogrified query text) and
    'time' (formatted duration in seconds).  SAVEPOINT statements are
    skipped unless include_savepoints is True.
    '''
    queries = []  # type: List[Dict[str, Union[str, bytes]]]

    def wrapper_execute(self: TimeTrackingCursor,
                        action: Callable[[NonBinaryStr, Iterable[Any]], None],
                        sql: NonBinaryStr,
                        params: Iterable[Any]=()) -> None:
        # Clear the cache backend before each query -- presumably so
        # cached lookups don't hide queries from the capture; confirm.
        cache = get_cache_backend(None)
        cache.clear()
        start = time.time()
        try:
            return action(sql, params)
        finally:
            # Record timing even if the query raises.
            stop = time.time()
            duration = stop - start
            if include_savepoints or ('SAVEPOINT' not in sql):
                queries.append({
                    'sql': self.mogrify(sql, params).decode('utf-8'),
                    'time': "%.3f" % duration,
                })

    # Monkey-patch the cursor class's execute/executemany, saving the
    # originals so they can be restored after the with block.
    old_execute = TimeTrackingCursor.execute
    old_executemany = TimeTrackingCursor.executemany

    def cursor_execute(self: TimeTrackingCursor, sql: NonBinaryStr,
                       params: Iterable[Any]=()) -> None:
        return wrapper_execute(self, super(TimeTrackingCursor, self).execute, sql, params)  # type: ignore # https://github.com/JukkaL/mypy/issues/1167
    TimeTrackingCursor.execute = cursor_execute  # type: ignore # https://github.com/JukkaL/mypy/issues/1167

    def cursor_executemany(self: TimeTrackingCursor, sql: NonBinaryStr,
                           params: Iterable[Any]=()) -> None:
        return wrapper_execute(self, super(TimeTrackingCursor, self).executemany, sql, params)  # type: ignore # https://github.com/JukkaL/mypy/issues/1167 # nocoverage -- doesn't actually get used in tests
    TimeTrackingCursor.executemany = cursor_executemany  # type: ignore # https://github.com/JukkaL/mypy/issues/1167

    yield queries

    # Restore the original cursor methods.
    TimeTrackingCursor.execute = old_execute  # type: ignore # https://github.com/JukkaL/mypy/issues/1167
    TimeTrackingCursor.executemany = old_executemany  # type: ignore # https://github.com/JukkaL/mypy/issues/1167
2014-01-27 22:53:36 +01:00
2017-02-09 22:58:43 +01:00
@contextmanager
2017-11-05 11:15:10 +01:00
def stdout_suppressed ( ) - > Iterator [ IO [ str ] ] :
2017-02-09 22:58:43 +01:00
""" Redirect stdout to /dev/null. """
with open ( os . devnull , ' a ' ) as devnull :
2017-08-25 20:01:20 +02:00
stdout , sys . stdout = sys . stdout , devnull
2017-02-09 22:58:43 +01:00
yield stdout
sys . stdout = stdout
2017-11-05 11:15:10 +01:00
def get_test_image_file(filename: str) -> IO[Any]:
    """Open (in binary mode) one of the sample images stored under
    ../tests/images, relative to this module."""
    images_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../tests/images'))
    return open(os.path.join(images_dir, filename), 'rb')
2017-11-05 11:15:10 +01:00
def avatar_disk_path(user_profile: UserProfile, medium: bool=False) -> Text:
    """Map a user's avatar URL onto the corresponding file path under
    settings.LOCAL_UPLOADS_DIR (dropping any query string)."""
    url_parts = avatar_url(user_profile, medium).split("/")
    filename = url_parts[-1].split("?")[0]
    return os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars",
                        url_parts[-2], filename)
2014-01-27 23:43:02 +01:00
2017-11-05 11:15:10 +01:00
def make_client(name: str) -> Client:
    """Fetch the Client with the given name, creating it if necessary."""
    return Client.objects.get_or_create(name=name)[0]
2017-11-05 11:15:10 +01:00
def find_key_by_email(address: Text) -> Optional[Text]:
    """Scan the outgoing-mail outbox (newest message first) for an email
    sent to `address` and return the 24-character confirmation key
    embedded in its body."""
    from django.core.mail import outbox
    key_regex = re.compile("accounts/do_confirm/([a-z0-9]{24})>")
    for message in reversed(outbox):
        if address in message.to:
            return key_regex.search(message.body).group(1)
    return None  # nocoverage -- in theory a test might want this case, but none do
2014-01-27 23:43:02 +01:00
2017-11-05 11:15:10 +01:00
def message_stream_count(user_profile: UserProfile) -> int:
    """Count the UserMessage rows belonging to this user."""
    rows = UserMessage.objects.select_related("message").filter(user_profile=user_profile)
    return rows.count()
2017-11-05 11:15:10 +01:00
def most_recent_usermessage(user_profile: UserProfile) -> UserMessage:
    """Return the user's newest UserMessage row."""
    rows = (UserMessage.objects
            .select_related("message")
            .filter(user_profile=user_profile)
            .order_by('-message'))
    return rows[0]  # Django does LIMIT here
2014-01-31 16:44:45 +01:00
2017-11-05 11:15:10 +01:00
def most_recent_message(user_profile: UserProfile) -> Message:
    """Return the newest Message delivered to this user."""
    return most_recent_usermessage(user_profile).message
2017-11-05 11:15:10 +01:00
def get_subscription(stream_name: Text, user_profile: UserProfile) -> Subscription:
    """Look up the user's active Subscription to the named stream in
    the user's own realm."""
    stream = get_stream(stream_name, user_profile.realm)
    return Subscription.objects.get(
        user_profile=user_profile,
        recipient=get_stream_recipient(stream.id),
        active=True)
2017-11-05 11:15:10 +01:00
def get_user_messages(user_profile: UserProfile) -> List[Message]:
    """Return every Message delivered to this user, oldest first."""
    rows = (UserMessage.objects
            .select_related("message")
            .filter(user_profile=user_profile)
            .order_by('message'))
    return [row.message for row in rows]
2017-11-05 11:37:41 +01:00
class DummyHandler:
    """Minimal stand-in for a Tornado handler; registering it via
    allocate_handler_id gives it a handler id for use in tests."""

    def __init__(self) -> None:
        allocate_handler_id(self)  # type: ignore # this is a testing mock
2014-01-31 16:44:45 +01:00
2017-11-05 11:37:41 +01:00
class POSTRequestMock:
    """Minimal stand-in for a Django POST HttpRequest, carrying just the
    attributes the code under test reads."""
    method = "POST"

    def __init__(self, post_data: Dict[str, Any], user_profile: Optional[UserProfile]) -> None:
        self.POST = post_data
        self.GET = {}  # type: Dict[str, Any]
        self.META = {'PATH_INFO': 'test'}
        self.path = ''
        self.user = user_profile
        self._tornado_handler = DummyHandler()
        self._log_data = {}  # type: Dict[str, Any]
2014-01-27 23:43:02 +01:00
2017-11-05 11:37:41 +01:00
class HostRequestMock:
    """A mock request object where get_host() works.  Useful for testing
    routes that use Zulip's subdomains feature"""

    def __init__(self, user_profile: UserProfile=None, host: Text=settings.EXTERNAL_HOST) -> None:
        self.host = host
        self.user = user_profile
        self.GET = {}  # type: Dict[str, Any]
        self.POST = {}  # type: Dict[str, Any]
        self.META = {'PATH_INFO': 'test'}
        # The remaining attributes just need to exist as empty strings.
        for attr in ('path', 'method', 'body', 'content_type', '_email'):
            setattr(self, attr, '')

    def get_host(self) -> Text:
        return self.host
2017-11-05 11:37:41 +01:00
class MockPythonResponse:
    """Simple stand-in for an HTTP-library response object exposing
    `text`, `status_code`, and an `ok` property."""

    def __init__(self, text: Text, status_code: int) -> None:
        self.status_code = status_code
        self.text = text

    @property
    def ok(self) -> bool:
        # Only a plain 200 counts as success.
        return self.status_code == 200
2016-07-28 01:40:28 +02:00
# True when the test runner enabled URL-coverage instrumentation via the
# TEST_INSTRUMENT_URL_COVERAGE environment variable.
INSTRUMENTING = os.environ.get('TEST_INSTRUMENT_URL_COVERAGE', '') == 'TRUE'
# Module-level accumulator of per-request instrumentation records.
INSTRUMENTED_CALLS = []  # type: List[Dict[str, Any]]

# Type of the URL-helper methods that instrument_url decorates.
UrlFuncT = Callable[..., HttpResponse]  # TODO: make more specific
2016-09-12 03:06:25 +02:00
2017-11-05 11:15:10 +01:00
def append_instrumentation_data(data: Dict[str, Any]) -> None:
    """Record one URL-instrumentation entry in the module-level log."""
    INSTRUMENTED_CALLS.append(data)
2017-11-05 11:15:10 +01:00
def instrument_url(f: UrlFuncT) -> UrlFuncT:
    """Decorator for ZulipTestCase URL helpers: when URL-coverage
    instrumentation is enabled, record timing and metadata for each
    request made through the wrapped helper."""
    if not INSTRUMENTING:  # nocoverage -- option is always enabled; should we remove?
        return f

    def wrapper(self: 'ZulipTestCase', url: Text, info: Dict[str, Any]={},
                **kwargs: Any) -> HttpResponse:
        begin = time.time()
        result = f(self, url, info, **kwargs)
        elapsed = time.time() - begin
        # Split any query string off the URL; partition() leaves
        # extra_info == '' when there is no '?', exactly matching the
        # old split-with-else behavior.
        url, _sep, extra_info = url.partition('?')
        append_instrumentation_data(dict(
            url=url,
            status_code=result.status_code,
            method=f.__name__,
            delay=elapsed,
            extra_info=extra_info,
            info=info,
            test_name=self.id(),
            kwargs=kwargs))
        return result
    return wrapper
2017-11-05 11:15:10 +01:00
def write_instrumentation_reports(full_suite: bool) -> None:
    """Dump the URL-coverage data collected by instrument_url to
    var/url_coverage.txt, and (when running the full suite) fail the
    run if any non-exempt URL pattern was never exercised."""
    if INSTRUMENTING:
        calls = INSTRUMENTED_CALLS

        from zproject.urls import urlpatterns, v1_api_and_json_patterns

        # Find our untested urls.
        pattern_cnt = collections.defaultdict(int)  # type: Dict[str, int]

        def re_strip(r: Any) -> str:
            # Strip regex anchors so patterns can be prefix-joined.
            return str(r).lstrip('^').rstrip('$')

        def find_patterns(patterns: List[Any], prefixes: List[str]) -> None:
            for pattern in patterns:
                find_pattern(pattern, prefixes)

        def cleanup_url(url: str) -> str:
            # Normalize recorded URLs to be relative, with no test-server host.
            if url.startswith('/'):
                url = url[1:]
            if url.startswith('http://testserver/'):
                url = url[len('http://testserver/'):]
            if url.startswith('http://zulip.testserver/'):
                url = url[len('http://zulip.testserver/'):]
            if url.startswith('http://testserver:9080/'):
                url = url[len('http://testserver:9080/'):]
            return url

        def find_pattern(pattern: Any, prefixes: List[str]) -> None:
            # Tally how many successful calls matched this URL pattern.
            # NOTE(review): type(LocaleRegexURLResolver) is the metaclass,
            # so this isinstance check looks like it intended
            # isinstance(pattern, LocaleRegexURLResolver) -- confirm.
            if isinstance(pattern, type(LocaleRegexURLResolver)):
                return  # nocoverage -- shouldn't actually happen

            if hasattr(pattern, 'url_patterns'):
                return

            canon_pattern = prefixes[0] + re_strip(pattern.regex.pattern)
            cnt = 0
            for call in calls:
                if 'pattern' in call:
                    # Already attributed to an earlier pattern.
                    continue

                url = cleanup_url(call['url'])

                for prefix in prefixes:
                    if url.startswith(prefix):
                        match_url = url[len(prefix):]
                        if pattern.regex.match(match_url):
                            if call['status_code'] in [200, 204, 301, 302]:
                                cnt += 1
                            call['pattern'] = canon_pattern
            pattern_cnt[canon_pattern] += cnt

        find_patterns(urlpatterns, ['', 'en/', 'de/'])
        find_patterns(v1_api_and_json_patterns, ['api/v1/', 'json/'])

        assert len(pattern_cnt) > 100
        untested_patterns = set([p for p in pattern_cnt if pattern_cnt[p] == 0])

        exempt_patterns = set([
            # We exempt some patterns that are called via Tornado.
            'api/v1/events',
            'api/v1/register',
            # We also exempt some development environment debugging
            # static content URLs, since the content they point to may
            # or may not exist.
            'coverage/(?P<path>.*)',
            'node-coverage/(?P<path>.*)',
            'docs/(?P<path>.*)',
        ])

        untested_patterns -= exempt_patterns

        var_dir = 'var'  # TODO make sure path is robust here
        fn = os.path.join(var_dir, 'url_coverage.txt')
        with open(fn, 'w') as f:
            for call in calls:
                try:
                    line = ujson.dumps(call)
                    f.write(line + '\n')
                except OverflowError:  # nocoverage -- test suite error handling
                    print('''
                        A JSON overflow error was encountered while
                        producing the URL coverage report.  Sometimes
                        this indicates that a test is passing objects
                        into methods like client_post(), which is
                        unnecessary and leads to false positives.
                        ''')
                    print(call)

        if full_suite:
            print('INFO: URL coverage report is in %s' % (fn,))
            print('INFO: Try running: ./tools/create-test-api-docs')

        if full_suite and len(untested_patterns):  # nocoverage -- test suite error handling
            print("\nERROR: Some URLs are untested!  Here's the list of untested URLs:")
            for untested_pattern in sorted(untested_patterns):
                print("   %s" % (untested_pattern,))
            sys.exit(1)
2016-07-28 02:40:04 +02:00
2017-11-05 11:15:10 +01:00
def get_all_templates() -> List[str]:
    """Walk every configured template directory and return the paths
    (relative to their template directory) of the templates the test
    suite should render."""
    templates = []

    def is_valid_template(p: Text, n: Text) -> bool:
        # Skip webhook templates, hidden files, package markers,
        # markdown files and .source.html inputs; keep only real files.
        return ('webhooks' not in p and
                not n.startswith('.') and
                not n.startswith('__init__') and
                not n.endswith('.md') and
                not n.endswith('.source.html') and
                os.path.isfile(p))

    for engine in loader.engines.all():
        for template_dir in engine.template_dirs:
            if not os.path.exists(template_dir):
                continue
            template_dir = os.path.normpath(template_dir)
            for dirpath, dirnames, fnames in os.walk(template_dir):
                for name in fnames:
                    path = os.path.join(dirpath, name)
                    if is_valid_template(path, name):
                        templates.append(os.path.relpath(path, template_dir))
    return templates
2017-04-19 10:04:23 +02:00
2017-11-05 11:15:10 +01:00
def load_subdomain_token(response: HttpResponse) -> Dict[str, Any]:
    """Extract and verify the signed token carried in the final path
    component of a log_into_subdomain redirect URL."""
    assert isinstance(response, HttpResponseRedirect)
    signed_token = response.url.rsplit('/', 1)[1]
    return signing.loads(signed_token, salt='zerver.views.auth.log_into_subdomain')
2017-10-28 22:52:40 +02:00
FuncT = TypeVar('FuncT', bound=Callable[..., None])

def use_s3_backend(method: FuncT) -> FuncT:
    """Decorator: run the wrapped test against a mocked S3 upload
    backend (with LOCAL_UPLOADS_DIR disabled), restoring the local-disk
    backend afterwards."""
    @mock_s3_deprecated
    @override_settings(LOCAL_UPLOADS_DIR=None)
    def wrapped(*args: Any, **kwargs: Any) -> Any:
        zerver.lib.upload.upload_backend = S3UploadBackend()
        try:
            return method(*args, **kwargs)
        finally:
            # Always switch back, even if the test raised.
            zerver.lib.upload.upload_backend = LocalUploadBackend()
    return wrapped