from __future__ import absolute_import
from __future__ import print_function

from contextlib import contextmanager
from typing import (cast, Any, Callable, Dict, Generator, Iterable, Iterator, List, Mapping,
                    Optional, Set, Sized, Tuple, Union)

from django.core.urlresolvers import LocaleRegexURLResolver
from django.conf import settings
from django.test import TestCase
from django.test.client import (
    BOUNDARY, MULTIPART_CONTENT, encode_multipart,
)
from django.template import loader
from django.http import HttpResponse
from django.db.utils import IntegrityError
from django.utils.translation import ugettext as _

from zerver.lib.initial_password import initial_password
from zerver.lib.db import TimeTrackingCursor
from zerver.lib.str_utils import force_text, NonBinaryStr
from zerver.lib import cache
from zerver.tornado import event_queue
from zerver.tornado.handlers import allocate_handler_id
from zerver.worker import queue_processors

from zerver.lib.actions import (
    check_send_message, create_stream_if_needed, bulk_add_subscriptions,
    get_display_recipient, bulk_remove_subscriptions
)

from zerver.models import (
    get_realm,
    get_stream,
    get_user_profile_by_email,
    Client,
    Message,
    Realm,
    Recipient,
    Stream,
    Subscription,
    UserMessage,
    UserProfile,
)

from zerver.lib.request import JsonableError

import base64
import collections
import mock
import os
import re
import sys
import time
import ujson
import unittest

import six
from six import text_type, binary_type
from six.moves import urllib

@contextmanager
def simulated_queue_client(client):
    # type: (type) -> Iterator[None]
    real_SimpleQueueClient = queue_processors.SimpleQueueClient
    queue_processors.SimpleQueueClient = client  # type: ignore # https://github.com/JukkaL/mypy/issues/1152
    yield
    queue_processors.SimpleQueueClient = real_SimpleQueueClient  # type: ignore # https://github.com/JukkaL/mypy/issues/1152
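
# Usage sketch (illustrative only; `FakeClient` is a hypothetical class that
# mimics whatever part of the SimpleQueueClient interface the code under test
# exercises):
#
#     with simulated_queue_client(FakeClient):
#         ...  # worker code instantiated here talks to FakeClient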

@contextmanager
def tornado_redirected_to_list(lst):
    # type: (List[Mapping[str, Any]]) -> Iterator[None]
    real_event_queue_process_notification = event_queue.process_notification
    event_queue.process_notification = lst.append
    yield
    event_queue.process_notification = real_event_queue_process_notification
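
# Usage sketch (illustrative): collect the notifications an action would have
# forwarded to Tornado, then assert on them.
#
#     events = []  # type: List[Mapping[str, Any]]
#     with tornado_redirected_to_list(events):
#         ...  # e.g. check_send_message(...)
#     # `events` now holds each notification passed to process_notification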

@contextmanager
def simulated_empty_cache():
    # type: () -> Generator[List[Tuple[str, Union[text_type, List[text_type]], text_type]], None, None]
    cache_queries = []  # type: List[Tuple[str, Union[text_type, List[text_type]], text_type]]

    def my_cache_get(key, cache_name=None):
        # type: (text_type, Optional[str]) -> Any
        cache_queries.append(('get', key, cache_name))
        return None

    def my_cache_get_many(keys, cache_name=None):
        # type: (List[text_type], Optional[str]) -> Optional[Dict[text_type, Any]]
        cache_queries.append(('getmany', keys, cache_name))
        return None

    old_get = cache.cache_get
    old_get_many = cache.cache_get_many
    cache.cache_get = my_cache_get
    cache.cache_get_many = my_cache_get_many
    yield cache_queries
    cache.cache_get = old_get
    cache.cache_get_many = old_get_many
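
# Usage sketch (illustrative): every cache read inside the block misses, and
# the attempted reads are recorded for inspection.
#
#     with simulated_empty_cache() as cache_queries:
#         ...  # code under test
#     # cache_queries holds ('get'/'getmany', key(s), cache_name) tuples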

@contextmanager
def queries_captured(include_savepoints=False):
    # type: (bool) -> Generator[List[Dict[str, Union[str, binary_type]]], None, None]
    '''
    Capture just the SQL queries executed during the body of the
    with statement.
    '''

    queries = []  # type: List[Dict[str, Union[str, binary_type]]]

    def wrapper_execute(self, action, sql, params=()):
        # type: (TimeTrackingCursor, Callable, NonBinaryStr, Iterable[Any]) -> None
        start = time.time()
        try:
            return action(sql, params)
        finally:
            stop = time.time()
            duration = stop - start
            if include_savepoints or ('SAVEPOINT' not in sql):
                queries.append({
                    'sql': self.mogrify(sql, params).decode('utf-8'),
                    'time': "%.3f" % duration,
                })

    old_execute = TimeTrackingCursor.execute
    old_executemany = TimeTrackingCursor.executemany

    def cursor_execute(self, sql, params=()):
        # type: (TimeTrackingCursor, NonBinaryStr, Iterable[Any]) -> None
        return wrapper_execute(self, super(TimeTrackingCursor, self).execute, sql, params)  # type: ignore # https://github.com/JukkaL/mypy/issues/1167
    TimeTrackingCursor.execute = cursor_execute  # type: ignore # https://github.com/JukkaL/mypy/issues/1167

    def cursor_executemany(self, sql, params=()):
        # type: (TimeTrackingCursor, NonBinaryStr, Iterable[Any]) -> None
        return wrapper_execute(self, super(TimeTrackingCursor, self).executemany, sql, params)  # type: ignore # https://github.com/JukkaL/mypy/issues/1167
    TimeTrackingCursor.executemany = cursor_executemany  # type: ignore # https://github.com/JukkaL/mypy/issues/1167

    yield queries

    TimeTrackingCursor.execute = old_execute  # type: ignore # https://github.com/JukkaL/mypy/issues/1167
    TimeTrackingCursor.executemany = old_executemany  # type: ignore # https://github.com/JukkaL/mypy/issues/1167
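
# Usage sketch (illustrative): assert on how many SQL queries a code path
# issues, with savepoint queries filtered out by default.
#
#     with queries_captured() as queries:
#         ...  # code under test
#     # each entry looks like {'sql': '...', 'time': '0.002'}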

def make_client(name):
    # type: (str) -> Client
    client, _ = Client.objects.get_or_create(name=name)
    return client

def find_key_by_email(address):
    # type: (text_type) -> Optional[text_type]
    from django.core.mail import outbox
    key_regex = re.compile("accounts/do_confirm/([a-f0-9]{40})>")
    for message in reversed(outbox):
        if address in message.to:
            return key_regex.search(message.body).groups()[0]
    return None
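
# Usage sketch (illustrative; the address is a hypothetical test user): after
# a test triggers a confirmation email, recover the key from Django's outbox.
#
#     key = find_key_by_email(u'newguy@zulip.com')
#     # then hit /accounts/do_confirm/<key> to complete the flow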

def message_ids(result):
    # type: (Dict[str, Any]) -> Set[int]
    return set(message['id'] for message in result['messages'])

def message_stream_count(user_profile):
    # type: (UserProfile) -> int
    return UserMessage.objects. \
        select_related("message"). \
        filter(user_profile=user_profile). \
        count()

def most_recent_usermessage(user_profile):
    # type: (UserProfile) -> UserMessage
    query = UserMessage.objects. \
        select_related("message"). \
        filter(user_profile=user_profile). \
        order_by('-message')
    return query[0]  # Django does LIMIT here

def most_recent_message(user_profile):
    # type: (UserProfile) -> Message
    usermessage = most_recent_usermessage(user_profile)
    return usermessage.message

def get_user_messages(user_profile):
    # type: (UserProfile) -> List[Message]
    query = UserMessage.objects. \
        select_related("message"). \
        filter(user_profile=user_profile). \
        order_by('message')
    return [um.message for um in query]
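
# Usage sketch (illustrative): these helpers keep delivery assertions terse.
#
#     before = message_stream_count(user_profile)
#     ...  # send a message to the user
#     assert message_stream_count(user_profile) == before + 1
#     assert most_recent_message(user_profile).content == u'hello'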

class DummyHandler(object):
    def __init__(self):
        # type: () -> None
        allocate_handler_id(self)  # type: ignore # this is a testing mock

class POSTRequestMock(object):
    method = "POST"

    def __init__(self, post_data, user_profile):
        # type: (Dict[str, Any], UserProfile) -> None
        self.GET = {}  # type: Dict[str, Any]
        self.POST = post_data
        self.user = user_profile
        self._tornado_handler = DummyHandler()
        self._log_data = {}  # type: Dict[str, Any]
        self.META = {'PATH_INFO': 'test'}
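
# Usage sketch (illustrative; `my_view` is a hypothetical view function):
# call a view directly without going through the Django test client.
#
#     request = POSTRequestMock({'anchor': '1'}, user_profile)
#     response = my_view(request)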

class HostRequestMock(object):
    """A mock request object where get_host() works.  Useful for testing
    routes that use Zulip's subdomains feature."""

    def __init__(self, host=settings.EXTERNAL_HOST):
        # type: (text_type) -> None
        self.host = host

    def get_host(self):
        # type: () -> text_type
        return self.host
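
# Usage sketch (illustrative): exercise subdomain-aware code with a chosen
# Host value.
#
#     request = HostRequestMock(host=u'zulip.testserver')
#     assert request.get_host() == u'zulip.testserver'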

class MockPythonResponse(object):
    def __init__(self, text, status_code):
        # type: (text_type, int) -> None
        self.text = text
        self.status_code = status_code

    @property
    def ok(self):
        # type: () -> bool
        return self.status_code == 200
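
# Usage sketch (illustrative): stand in for a `requests` response object when
# patching outbound HTTP calls.
#
#     ok = MockPythonResponse(u'{"result": "success"}', 200)
#     with mock.patch('requests.get', return_value=ok):
#         ...  # code under test sees .text, .status_code, and .ok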

INSTRUMENTING = os.environ.get('TEST_INSTRUMENT_URL_COVERAGE', '') == 'TRUE'
INSTRUMENTED_CALLS = []  # type: List[Dict[str, Any]]

UrlFuncT = Callable[..., HttpResponse]  # TODO: make more specific

def instrument_url(f):
    # type: (UrlFuncT) -> UrlFuncT
    if not INSTRUMENTING:
        return f
    else:
        def wrapper(self, url, info={}, **kwargs):
            # type: (Any, text_type, Dict[str, Any], **Any) -> HttpResponse
            start = time.time()
            result = f(self, url, info, **kwargs)
            delay = time.time() - start
            test_name = self.id()
            if '?' in url:
                url, extra_info = url.split('?', 1)
            else:
                extra_info = ''

            INSTRUMENTED_CALLS.append(dict(
                url=url,
                status_code=result.status_code,
                method=f.__name__,
                delay=delay,
                extra_info=extra_info,
                info=info,
                test_name=test_name,
                kwargs=kwargs))
            return result
        return wrapper
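
# Usage sketch (illustrative): decorate the test-client wrappers on a
# TestCase subclass so that, when TEST_INSTRUMENT_URL_COVERAGE=TRUE, every
# request they issue is logged to INSTRUMENTED_CALLS.
#
#     @instrument_url
#     def client_get(self, url, info={}, **kwargs):
#         return self.client.get(url, info, **kwargs)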

def write_instrumentation_reports(full_suite):
    # type: (bool) -> None
    if INSTRUMENTING:
        calls = INSTRUMENTED_CALLS

        from zproject.urls import urlpatterns, v1_api_and_json_patterns

        # Find our untested URLs.
        pattern_cnt = collections.defaultdict(int)  # type: Dict[str, int]

        def re_strip(r):
            # type: (Any) -> str
            return str(r).lstrip('^').rstrip('$')

        def find_patterns(patterns, prefixes):
            # type: (List[Any], List[str]) -> None
            for pattern in patterns:
                find_pattern(pattern, prefixes)

        def cleanup_url(url):
            # type: (str) -> str
            if url.startswith('/'):
                url = url[1:]
            if url.startswith('http://testserver/'):
                url = url[len('http://testserver/'):]
            if url.startswith('http://zulip.testserver/'):
                url = url[len('http://zulip.testserver/'):]
            if url.startswith('http://testserver:9080/'):
                url = url[len('http://testserver:9080/'):]
            return url

        def find_pattern(pattern, prefixes):
            # type: (Any, List[str]) -> None

            if isinstance(pattern, LocaleRegexURLResolver):
                return

            if hasattr(pattern, 'url_patterns'):
                return

            canon_pattern = prefixes[0] + re_strip(pattern.regex.pattern)
            cnt = 0
            for call in calls:
                if 'pattern' in call:
                    continue

                url = cleanup_url(call['url'])

                for prefix in prefixes:
                    if url.startswith(prefix):
                        match_url = url[len(prefix):]
                        if pattern.regex.match(match_url):
                            if call['status_code'] in [200, 204, 301, 302]:
                                cnt += 1
                            call['pattern'] = canon_pattern
            pattern_cnt[canon_pattern] += cnt

        find_patterns(urlpatterns, ['', 'en/', 'de/'])
        find_patterns(v1_api_and_json_patterns, ['api/v1/', 'json/'])

        assert len(pattern_cnt) > 100
        untested_patterns = set([p for p in pattern_cnt if pattern_cnt[p] == 0])

        # We exempt some patterns that are called via Tornado.
        exempt_patterns = set([
            'api/v1/events',
            'api/v1/register',
        ])

        untested_patterns -= exempt_patterns

        var_dir = 'var'  # TODO make sure path is robust here
        fn = os.path.join(var_dir, 'url_coverage.txt')
        with open(fn, 'w') as f:
            for call in calls:
                try:
                    line = ujson.dumps(call)
                    f.write(line + '\n')
                except OverflowError:
                    print('''
                        A JSON overflow error was encountered while
                        producing the URL coverage report.  Sometimes
                        this indicates that a test is passing objects
                        into methods like client_post(), which is
                        unnecessary and leads to false positives.
                        ''')
                    print(call)

        if full_suite:
            print('INFO: URL coverage report is in %s' % (fn,))
            print('INFO: Try running: ./tools/create-test-api-docs')

        if full_suite and len(untested_patterns):
            print("\nERROR: Some URLs are untested! Here's the list of untested URLs:")
            for untested_pattern in sorted(untested_patterns):
                print(" %s" % (untested_pattern,))
            sys.exit(1)

def get_all_templates():
    # type: () -> List[str]
    templates = []  # type: List[str]

    relpath = os.path.relpath
    isfile = os.path.isfile
    path_exists = os.path.exists

    def is_valid_template(p, n):
        # type: (text_type, text_type) -> bool
        return (not n.startswith('.') and
                not n.startswith('__init__') and
                not n.endswith(".md") and
                isfile(p))

    def process(template_dir, dirname, fnames):
        # type: (str, str, Iterable[str]) -> None
        for name in fnames:
            path = os.path.join(dirname, name)
            if is_valid_template(path, name):
                templates.append(relpath(path, template_dir))

    for engine in loader.engines.all():
        template_dirs = [d for d in engine.template_dirs if path_exists(d)]
        for template_dir in template_dirs:
            template_dir = os.path.normpath(template_dir)
            for dirpath, dirnames, fnames in os.walk(template_dir):
                process(template_dir, dirpath, fnames)

    return templates
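
# Usage sketch (illustrative): a meta-test can render every discovered
# template to flush out template syntax errors.
#
#     for template_name in get_all_templates():
#         loader.get_template(template_name)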