2020-06-19 00:32:55 +02:00
|
|
|
import collections
|
|
|
|
import os
|
|
|
|
import re
|
|
|
|
import sys
|
|
|
|
import time
|
2016-05-18 20:35:35 +02:00
|
|
|
from contextlib import contextmanager
|
2017-10-28 22:52:40 +02:00
|
|
|
from typing import (
|
2020-06-11 00:54:34 +02:00
|
|
|
IO,
|
|
|
|
TYPE_CHECKING,
|
|
|
|
Any,
|
|
|
|
Callable,
|
|
|
|
Dict,
|
|
|
|
Generator,
|
|
|
|
Iterable,
|
|
|
|
Iterator,
|
|
|
|
List,
|
|
|
|
Mapping,
|
|
|
|
Optional,
|
|
|
|
Tuple,
|
|
|
|
TypeVar,
|
|
|
|
Union,
|
2017-10-28 22:52:40 +02:00
|
|
|
)
|
2020-06-19 00:32:55 +02:00
|
|
|
from unittest import mock
|
2016-01-25 20:38:44 +01:00
|
|
|
|
2018-12-07 17:52:01 +01:00
|
|
|
import boto3
|
2020-06-19 00:32:55 +02:00
|
|
|
import fakeldap
|
|
|
|
import ldap
|
|
|
|
import ujson
|
2018-12-07 17:52:01 +01:00
|
|
|
from boto3.resources.base import ServiceResource
|
2020-06-11 00:54:34 +02:00
|
|
|
from django.conf import settings
|
|
|
|
from django.db.migrations.state import StateApps
|
|
|
|
from django.http import HttpResponse, HttpResponseRedirect
|
|
|
|
from django.test import override_settings
|
|
|
|
from django.urls import URLResolver
|
2020-06-19 00:32:55 +02:00
|
|
|
from moto import mock_s3
|
2014-01-27 23:43:02 +01:00
|
|
|
|
2017-10-28 22:52:40 +02:00
|
|
|
import zerver.lib.upload
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib import cache
|
2020-03-12 14:17:25 +01:00
|
|
|
from zerver.lib.actions import do_set_realm_property
|
2016-12-19 08:48:03 +01:00
|
|
|
from zerver.lib.avatar import avatar_url
|
2017-02-13 09:19:52 +01:00
|
|
|
from zerver.lib.cache import get_cache_backend
|
2020-05-04 02:36:15 +02:00
|
|
|
from zerver.lib.db import Params, ParamsT, Query, TimeTrackingCursor
|
2019-01-11 01:26:11 +01:00
|
|
|
from zerver.lib.integrations import WEBHOOK_INTEGRATIONS
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.upload import LocalUploadBackend, S3UploadBackend
|
2014-01-27 23:43:02 +01:00
|
|
|
from zerver.models import (
|
|
|
|
Client,
|
|
|
|
Message,
|
2020-03-12 14:17:25 +01:00
|
|
|
Realm,
|
2014-01-27 23:43:02 +01:00
|
|
|
Subscription,
|
|
|
|
UserMessage,
|
2016-06-03 02:10:13 +02:00
|
|
|
UserProfile,
|
2020-06-11 00:54:34 +02:00
|
|
|
get_realm,
|
|
|
|
get_stream,
|
2014-01-27 23:43:02 +01:00
|
|
|
)
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.tornado import event_queue
|
2020-07-01 00:43:55 +02:00
|
|
|
from zerver.tornado.handlers import AsyncDjangoHandler, allocate_handler_id
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.worker import queue_processors
|
|
|
|
from zproject.backends import ExternalAuthDataDict, ExternalAuthResult
|
2020-02-23 18:58:08 +01:00
|
|
|
|
2019-07-30 20:58:48 +02:00
|
|
|
if TYPE_CHECKING:
|
2018-12-17 20:14:47 +01:00
|
|
|
# Avoid an import cycle; we only need these for type annotations.
|
2020-06-19 00:32:55 +02:00
|
|
|
from zerver.lib.test_classes import MigrationsTestCase, ZulipTestCase
|
2020-06-11 00:54:34 +02:00
|
|
|
|
2016-12-13 10:59:54 +01:00
|
|
|
|
|
|
|
class MockLDAP(fakeldap.MockLDAP):
    """fakeldap.MockLDAP extended to also expose the ldap exception
    classes tests need, so test code can use this mock as a drop-in
    replacement for the `ldap` module when catching errors."""

    class LDAPError(ldap.LDAPError):
        pass

    class INVALID_CREDENTIALS(ldap.INVALID_CREDENTIALS):
        pass

    class NO_SUCH_OBJECT(ldap.NO_SUCH_OBJECT):
        pass

    class ALREADY_EXISTS(ldap.ALREADY_EXISTS):
        pass
|
2014-01-27 22:53:36 +01:00
|
|
|
|
2017-07-27 06:31:26 +02:00
|
|
|
@contextmanager
def stub_event_queue_user_events(event_queue_return: Any, user_events_return: Any) -> Iterator[None]:
    """Stub out the event-queue helpers in zerver.lib.events so that
    request_event_queue and get_user_events return the given canned
    values for the duration of the with-block."""
    queue_patch = mock.patch('zerver.lib.events.request_event_queue',
                             return_value=event_queue_return)
    events_patch = mock.patch('zerver.lib.events.get_user_events',
                              return_value=user_events_return)
    with queue_patch, events_patch:
        yield
|
|
|
|
|
2014-01-27 22:53:36 +01:00
|
|
|
@contextmanager
def simulated_queue_client(client: Callable[..., Any]) -> Iterator[None]:
    """Swap the queue processors' SimpleQueueClient for a fake client
    class while the with-block runs."""
    patcher = mock.patch.object(queue_processors, 'SimpleQueueClient', client)
    patcher.start()
    try:
        yield
    finally:
        patcher.stop()
|
2014-01-27 22:53:36 +01:00
|
|
|
|
|
|
|
@contextmanager
def tornado_redirected_to_list(lst: List[Mapping[str, Any]]) -> Iterator[None]:
    """Redirect tornado event notifications into `lst` for the duration
    of the with-block, instead of delivering them for real.

    Fix: the restore of event_queue.process_notification now happens in
    a `finally` block, so an exception inside the with-body can no
    longer leave the hook permanently patched for later tests.
    """
    real_event_queue_process_notification = event_queue.process_notification
    # process_notification takes a single parameter called 'notice'.
    # lst.append takes a single argument called 'object'.
    # Some code might call process_notification using keyword arguments,
    # so mypy doesn't allow assigning lst.append to process_notification
    # So explicitly change parameter name to 'notice' to work around this problem
    event_queue.process_notification = lambda notice: lst.append(notice)
    try:
        yield
    finally:
        event_queue.process_notification = real_event_queue_process_notification
|
2014-01-27 22:53:36 +01:00
|
|
|
|
2018-12-17 22:38:21 +01:00
|
|
|
class EventInfo:
    """Holds the (realm_id, payload, user_ids) triple captured from a
    mocked send_event call (see capture_event)."""

    def populate(self, call_args_list: List[Any]) -> None:
        """Unpack the positional args of the first recorded call into
        realm_id, payload, and user_ids attributes."""
        first_call_args = call_args_list[0][0]
        self.realm_id = first_call_args[0]
        self.payload = first_call_args[1]
        self.user_ids = first_call_args[2]
|
|
|
|
|
|
|
|
@contextmanager
def capture_event(event_info: EventInfo) -> Iterator[None]:
    """Capture exactly one event sent through zerver.lib.actions.send_event
    into `event_info`.

    Use this for simple endpoints that throw a single event
    in zerver.lib.actions; zero or multiple events is an assertion failure.
    """
    with mock.patch('zerver.lib.actions.send_event') as m:
        yield

    num_events = len(m.call_args_list)
    if num_events == 0:
        raise AssertionError('No event was sent inside actions.py')
    if num_events > 1:
        raise AssertionError('Too many events sent by action')

    event_info.populate(m.call_args_list)
|
|
|
|
|
2014-01-27 22:53:36 +01:00
|
|
|
@contextmanager
def simulated_empty_cache() -> Iterator[List[Tuple[str, Union[str, List[str]], Optional[str]]]]:
    """Stub the cache layer so every lookup misses, and yield a log of
    ('get'/'getmany', key(s), cache_name) tuples recording each attempt.

    Fix: the original stubs were restored outside any try/finally, so an
    exception inside the with-body left cache_get/cache_get_many broken
    for every later test; restoration now always runs.
    """
    cache_queries: List[Tuple[str, Union[str, List[str]], Optional[str]]] = []

    def my_cache_get(key: str, cache_name: Optional[str]=None) -> Optional[Dict[str, Any]]:
        # Log the lookup and report a miss.
        cache_queries.append(('get', key, cache_name))
        return None

    def my_cache_get_many(keys: List[str], cache_name: Optional[str]=None) -> Dict[str, Any]:  # nocoverage -- simulated code doesn't use this
        cache_queries.append(('getmany', keys, cache_name))
        return {}

    old_get = cache.cache_get
    old_get_many = cache.cache_get_many
    cache.cache_get = my_cache_get
    cache.cache_get_many = my_cache_get_many
    try:
        yield cache_queries
    finally:
        cache.cache_get = old_get
        cache.cache_get_many = old_get_many
|
|
|
|
|
|
|
|
@contextmanager
def queries_captured(include_savepoints: bool=False) -> Generator[
        List[Dict[str, Union[str, bytes]]], None, None]:
    '''
    Allow a user to capture just the queries executed during
    the with statement.
    '''

    # Each entry is {'sql': <rendered query string>, 'time': <seconds to 3 dp>}.
    queries: List[Dict[str, Union[str, bytes]]] = []

    def wrapper_execute(self: TimeTrackingCursor,
                        action: Callable[[str, ParamsT], None],
                        sql: Query,
                        params: ParamsT) -> None:
        # Clear the cache before each query so cached values cannot mask
        # database queries that the code under test would otherwise issue.
        cache = get_cache_backend(None)
        cache.clear()
        start = time.time()
        try:
            return action(sql, params)
        finally:
            # Record timing/SQL even if the query raised.
            stop = time.time()
            duration = stop - start
            # SAVEPOINT statements are transaction-management noise; skip
            # them unless the caller explicitly asked to include them.
            if include_savepoints or not isinstance(sql, str) or 'SAVEPOINT' not in sql:
                queries.append({
                    'sql': self.mogrify(sql, params).decode('utf-8'),
                    'time': f"{duration:.3f}",
                })

    # These replace TimeTrackingCursor.execute/executemany below; the
    # super(TimeTrackingCursor, self) call dispatches to the real cursor.
    def cursor_execute(self: TimeTrackingCursor, sql: Query,
                       params: Optional[Params]=None) -> None:
        return wrapper_execute(self, super(TimeTrackingCursor, self).execute, sql, params)

    def cursor_executemany(self: TimeTrackingCursor, sql: Query,
                           params: Iterable[Params]) -> None:
        return wrapper_execute(self, super(TimeTrackingCursor, self).executemany, sql, params) # nocoverage -- doesn't actually get used in tests

    with mock.patch.multiple(TimeTrackingCursor, execute=cursor_execute, executemany=cursor_executemany):
        yield queries
|
2014-01-27 22:53:36 +01:00
|
|
|
|
2017-02-09 22:58:43 +01:00
|
|
|
@contextmanager
def stdout_suppressed() -> Iterator[IO[str]]:
    """Redirect stdout to /dev/null.

    Yields the original sys.stdout so the caller can still write to it.

    Fix: restoration of sys.stdout is now in a `finally` block, so an
    exception inside the with-body no longer leaves stdout redirected
    to /dev/null for the rest of the process.
    """

    with open(os.devnull, 'a') as devnull:
        stdout, sys.stdout = sys.stdout, devnull
        try:
            yield stdout
        finally:
            sys.stdout = stdout
|
|
|
|
|
2020-03-12 14:17:25 +01:00
|
|
|
def reset_emails_in_zulip_realm() -> None:
    """Make every email address in the 'zulip' test realm visible to
    everyone by resetting the realm's email_address_visibility setting."""
    zulip_realm = get_realm('zulip')
    do_set_realm_property(
        zulip_realm,
        'email_address_visibility',
        Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE,
    )
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def get_test_image_file(filename: str) -> IO[Any]:
    """Open (in binary mode) a fixture image from zerver/tests/images."""
    images_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../tests/images'))
    full_path = os.path.join(images_dir, filename)
    return open(full_path, 'rb')
|
|
|
|
|
2018-06-06 14:30:26 +02:00
|
|
|
def avatar_disk_path(user_profile: UserProfile, medium: bool=False, original: bool=False) -> str:
    """Compute the on-disk path (under LOCAL_UPLOADS_DIR/avatars) of the
    user's avatar file, based on its URL; with original=True, return the
    path of the pre-thumbnailing `.original` file instead."""
    url = avatar_url(user_profile, medium)
    assert url is not None
    url_parts = url.split("/")
    # Strip any query string (e.g. a cache-busting version) off the filename.
    file_name = url_parts[-1].split("?")[0]
    disk_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars",
                             url_parts[-2], file_name)
    if original:
        disk_path = disk_path.replace(".png", ".original")
    return disk_path
|
2014-01-27 23:43:02 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def make_client(name: str) -> Client:
    """Fetch the Client row with the given name, creating it if missing."""
    return Client.objects.get_or_create(name=name)[0]
|
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def find_key_by_email(address: str) -> Optional[str]:
    """Scan outgoing test email (newest first) for a confirmation link
    addressed to `address`, returning its 24-character key."""
    from django.core.mail import outbox
    key_regex = re.compile("accounts/do_confirm/([a-z0-9]{24})>")
    for message in reversed(outbox):
        if address not in message.to:
            continue
        match = key_regex.search(message.body)
        assert match is not None
        return match.group(1)
    return None  # nocoverage -- in theory a test might want this case, but none do
|
2014-01-27 23:43:02 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def message_stream_count(user_profile: UserProfile) -> int:
    """Count the UserMessage rows (i.e. received messages) for this user."""
    return (
        UserMessage.objects
        .select_related("message")
        .filter(user_profile=user_profile)
        .count()
    )
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def most_recent_usermessage(user_profile: UserProfile) -> UserMessage:
    """Fetch the user's UserMessage row with the highest message id."""
    rows = (
        UserMessage.objects
        .select_related("message")
        .filter(user_profile=user_profile)
        .order_by('-message')
    )
    return rows[0]  # Django does LIMIT here
|
2014-01-31 16:44:45 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def most_recent_message(user_profile: UserProfile) -> Message:
    """Fetch the newest Message the user has received."""
    return most_recent_usermessage(user_profile).message
|
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def get_subscription(stream_name: str, user_profile: UserProfile) -> Subscription:
    """Fetch the user's active Subscription to the named stream within
    the user's own realm."""
    stream = get_stream(stream_name, user_profile.realm)
    return Subscription.objects.get(
        user_profile=user_profile,
        recipient_id=stream.recipient_id,
        active=True,
    )
|
2017-01-30 04:31:24 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def get_user_messages(user_profile: UserProfile) -> List[Message]:
    """Fetch every Message the user has received, ordered oldest first."""
    rows = (
        UserMessage.objects
        .select_related("message")
        .filter(user_profile=user_profile)
        .order_by('message')
    )
    return [row.message for row in rows]
|
|
|
|
|
2020-07-01 00:43:55 +02:00
|
|
|
class DummyHandler(AsyncDjangoHandler):
    # Minimal stand-in tornado handler for tests: it only registers
    # itself to get a handler id.  Note it deliberately does not call
    # AsyncDjangoHandler.__init__ -- presumably because that would
    # require a real tornado application/request; TODO(review) confirm.
    def __init__(self) -> None:
        allocate_handler_id(self)
|
2014-01-31 16:44:45 +01:00
|
|
|
|
2017-11-05 11:37:41 +01:00
|
|
|
class POSTRequestMock:
    """Minimal stand-in for a Django POST HttpRequest, for calling view
    functions directly in tests."""

    method = "POST"

    def __init__(self, post_data: Dict[str, Any], user_profile: Optional[UserProfile]) -> None:
        self.GET: Dict[str, Any] = {}

        # Convert any integer parameters passed into strings, even
        # though of course the HTTP API would do so. Ideally, we'd
        # get rid of this abstraction entirely and just use the HTTP
        # API directly, but while it exists, we need this code.
        self.POST: Dict[str, str] = {key: str(value) for key, value in post_data.items()}

        self.user = user_profile
        self._tornado_handler = DummyHandler()
        self._log_data: Dict[str, Any] = {}
        self.META = {'PATH_INFO': 'test'}
        self.path = ''
|
2014-01-27 23:43:02 +01:00
|
|
|
|
2017-11-05 11:37:41 +01:00
|
|
|
class HostRequestMock:
    """A mock request object where get_host() works. Useful for testing
    routes that use Zulip's subdomains feature"""

    def __init__(self, user_profile: Optional[UserProfile]=None, host: str=settings.EXTERNAL_HOST) -> None:
        self.host = host
        self.user = user_profile
        # Empty query/body parameter mappings, like a bare GET request.
        self.GET: Dict[str, Any] = {}
        self.POST: Dict[str, Any] = {}
        self.META = {'PATH_INFO': 'test'}
        self.path = ''
        self.method = ''
        self.body = ''
        self.content_type = ''

    def get_host(self) -> str:
        # Mirrors django.http.HttpRequest.get_host() for subdomain routing.
        return self.host
|
|
|
|
|
2017-11-05 11:37:41 +01:00
|
|
|
class MockPythonResponse:
    """Lightweight stand-in for a requests.Response-style object, exposing
    text, status_code, headers, ok, and iter_content."""

    def __init__(self, text: str, status_code: int, headers: Optional[Dict[str, str]]=None) -> None:
        self.text = text
        self.status_code = status_code
        # Default to an HTML content-type, like the pages these mocks imitate.
        self.headers = {'content-type': 'text/html'} if headers is None else headers

    @property
    def ok(self) -> bool:
        """True exactly when the status code is 200."""
        return 200 == self.status_code

    def iter_content(self, n: int) -> Generator[str, Any, None]:
        """Yield (once) the first n characters of the body."""
        yield self.text[:n]
|
|
|
|
|
|
|
|
|
2016-07-28 01:40:28 +02:00
|
|
|
# True when the test runner enabled URL-coverage instrumentation via the
# TEST_INSTRUMENT_URL_COVERAGE environment variable.
INSTRUMENTING = os.environ.get('TEST_INSTRUMENT_URL_COVERAGE', '') == 'TRUE'

# One record per instrumented URL call (url, status, timing, ...);
# appended by append_instrumentation_data via instrument_url's wrapper.
INSTRUMENTED_CALLS: List[Dict[str, Any]] = []

# Type of the test-client URL helpers that instrument_url wraps.
UrlFuncT = Callable[..., HttpResponse] # TODO: make more specific
|
2016-09-12 03:06:25 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def append_instrumentation_data(data: Dict[str, Any]) -> None:
    """Record one instrumented URL call for the URL-coverage report
    produced by write_instrumentation_reports."""
    INSTRUMENTED_CALLS.append(data)
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def instrument_url(f: UrlFuncT) -> UrlFuncT:
    """Decorator for ZulipTestCase's HTTP helper methods (client_get,
    client_post, ...) that records every URL hit -- URL, method name,
    status code, and timing -- into INSTRUMENTED_CALLS, so that
    write_instrumentation_reports can compute URL coverage afterwards.
    """
    if not INSTRUMENTING:  # nocoverage -- option is always enabled; should we remove?
        return f
    else:
        def wrapper(self: 'ZulipTestCase', url: str, info: Optional[Dict[str, Any]]=None,
                    **kwargs: Any) -> HttpResponse:
            # Use a None sentinel instead of a mutable `{}` default: the
            # old shared default dict would leak state across calls if any
            # wrapped helper mutated it.  A fresh empty dict per call
            # preserves the old default's intended semantics.
            if info is None:
                info = {}
            start = time.time()
            result = f(self, url, info, **kwargs)
            delay = time.time() - start
            test_name = self.id()
            # Split off the query string so coverage is keyed on the path.
            if '?' in url:
                url, extra_info = url.split('?', 1)
            else:
                extra_info = ''

            append_instrumentation_data(dict(
                url=url,
                status_code=result.status_code,
                method=f.__name__,
                delay=delay,
                extra_info=extra_info,
                info=info,
                test_name=test_name,
                kwargs=kwargs))
            return result
        return wrapper
|
|
|
|
|
2019-01-11 01:26:11 +01:00
|
|
|
def write_instrumentation_reports(full_suite: bool, include_webhooks: bool) -> None:
    """Dump the instrumented-call log to var/url_coverage.txt and, when
    running the full suite, fail (sys.exit(1)) if any non-exempt URL
    pattern was never exercised successfully by the tests.

    full_suite: only report/enforce coverage for a full test run.
    include_webhooks: when False, webhook URLs are exempted from the
        coverage requirement.
    """
    if INSTRUMENTING:
        calls = INSTRUMENTED_CALLS

        # Imported locally to avoid import cycles / settings-time costs.
        from zproject.urls import urlpatterns, v1_api_and_json_patterns

        # Find our untested urls.
        # Maps canonical URL pattern -> number of successful calls matching it.
        pattern_cnt: Dict[str, int] = collections.defaultdict(int)

        def re_strip(r: Any) -> str:
            # Strip regex anchors so patterns compare cleanly against paths.
            return str(r).lstrip('^').rstrip('$')

        def find_patterns(patterns: List[Any], prefixes: List[str]) -> None:
            for pattern in patterns:
                find_pattern(pattern, prefixes)

        def cleanup_url(url: str) -> str:
            # Normalize recorded URLs to server-relative paths.
            if url.startswith('/'):
                url = url[1:]
            if url.startswith('http://testserver/'):
                url = url[len('http://testserver/'):]
            if url.startswith('http://zulip.testserver/'):
                url = url[len('http://zulip.testserver/'):]
            if url.startswith('http://testserver:9080/'):
                url = url[len('http://testserver:9080/'):]
            return url

        def find_pattern(pattern: Any, prefixes: List[str]) -> None:
            # NOTE(review): `type(URLResolver)` is the metaclass (i.e.
            # `type`), so this tests "pattern is a class", not "pattern is
            # a URLResolver instance" -- presumably intended as the latter;
            # confirm before relying on this branch.
            if isinstance(pattern, type(URLResolver)):
                return  # nocoverage -- shouldn't actually happen

            # Resolvers with nested url_patterns are handled via their leaves.
            if hasattr(pattern, 'url_patterns'):
                return

            canon_pattern = prefixes[0] + re_strip(pattern.pattern.regex.pattern)
            cnt = 0
            for call in calls:
                # Each call is attributed to at most one pattern.
                if 'pattern' in call:
                    continue

                url = cleanup_url(call['url'])

                for prefix in prefixes:
                    if url.startswith(prefix):
                        match_url = url[len(prefix):]
                        if pattern.resolve(match_url):
                            # Only successful/redirect responses count as coverage.
                            if call['status_code'] in [200, 204, 301, 302]:
                                cnt += 1
                            call['pattern'] = canon_pattern
            pattern_cnt[canon_pattern] += cnt

        find_patterns(urlpatterns, ['', 'en/', 'de/'])
        find_patterns(v1_api_and_json_patterns, ['api/v1/', 'json/'])

        # Sanity check that we actually collected the URL table.
        assert len(pattern_cnt) > 100
        untested_patterns = {p.replace("\\", "") for p in pattern_cnt if pattern_cnt[p] == 0}

        exempt_patterns = set([
            # We exempt some patterns that are called via Tornado.
            'api/v1/events',
            'api/v1/events/internal',
            'api/v1/register',
            # We also exempt some development environment debugging
            # static content URLs, since the content they point to may
            # or may not exist.
            'coverage/(?P<path>.+)',
            'node-coverage/(?P<path>.+)',
            'docs/(?P<path>.+)',
            'casper/(?P<path>.+)',
            'static/(?P<path>.*)',
        ] + [webhook.url for webhook in WEBHOOK_INTEGRATIONS if not include_webhooks])

        untested_patterns -= exempt_patterns

        var_dir = 'var'  # TODO make sure path is robust here
        fn = os.path.join(var_dir, 'url_coverage.txt')
        with open(fn, 'w') as f:
            for call in calls:
                try:
                    line = ujson.dumps(call)
                    f.write(line + '\n')
                except OverflowError:  # nocoverage -- test suite error handling
                    print('''
                    A JSON overflow error was encountered while
                    producing the URL coverage report. Sometimes
                    this indicates that a test is passing objects
                    into methods like client_post(), which is
                    unnecessary and leads to false positives.
                    ''')
                    print(call)

        if full_suite:
            print(f'INFO: URL coverage report is in {fn}')
            print('INFO: Try running: ./tools/create-test-api-docs')

        if full_suite and len(untested_patterns):  # nocoverage -- test suite error handling
            print("\nERROR: Some URLs are untested! Here's the list of untested URLs:")
            for untested_pattern in sorted(untested_patterns):
                print(f"   {untested_pattern}")
            sys.exit(1)
|
2016-07-28 02:40:04 +02:00
|
|
|
|
2020-02-23 18:58:08 +01:00
|
|
|
def load_subdomain_token(response: HttpResponse) -> ExternalAuthDataDict:
    """Extract the login token from a subdomain-auth redirect and return
    the decoded ExternalAuthDataDict it stores (without deleting it)."""
    assert isinstance(response, HttpResponseRedirect)
    # The token is the final path component of the redirect URL.
    token = response.url.rsplit('/', 1)[1]
    auth_result = ExternalAuthResult(login_token=token, delete_stored_data=False)
    data = auth_result.data_dict
    assert data is not None
    return data
|
2017-10-28 22:52:40 +02:00
|
|
|
|
|
|
|
# Type variable for signature-preserving decorators (e.g. use_s3_backend);
# bound to callables returning None, matching test methods.
FuncT = TypeVar('FuncT', bound=Callable[..., None])
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def use_s3_backend(method: FuncT) -> FuncT:
    """Decorator that runs a test method against the S3 upload backend.

    Stacks moto's @mock_s3 (so no real AWS calls are made) and disables
    LOCAL_UPLOADS_DIR, then swaps the module-global
    zerver.lib.upload.upload_backend to an S3UploadBackend for the
    duration of the call.
    """
    @mock_s3
    @override_settings(LOCAL_UPLOADS_DIR=None)
    def new_method(*args: Any, **kwargs: Any) -> Any:
        zerver.lib.upload.upload_backend = S3UploadBackend()
        try:
            return method(*args, **kwargs)
        finally:
            # Always restore the default local backend, even if the
            # wrapped test raises.
            zerver.lib.upload.upload_backend = LocalUploadBackend()
    return new_method
|
2018-04-25 00:37:55 +02:00
|
|
|
|
2018-12-07 17:52:01 +01:00
|
|
|
def create_s3_buckets(*bucket_names: str) -> List[ServiceResource]:
    """Create an S3 bucket for each given name and return the bucket objects.

    Intended to run under moto's mock_s3 (see use_s3_backend), so the
    buckets are created in the mocked S3, not in real AWS.
    """
    # Note: a `*args: str` annotation types each individual argument; the
    # previous `Tuple[str]` wrongly claimed each argument was itself a tuple.
    session = boto3.Session(settings.S3_KEY, settings.S3_SECRET_KEY)
    s3 = session.resource('s3')
    buckets = [s3.create_bucket(Bucket=name) for name in bucket_names]
    return buckets
|
|
|
|
|
2019-05-13 07:04:31 +02:00
|
|
|
def use_db_models(method: Callable[..., None]) -> Callable[..., None]:  # nocoverage
    """Decorator for migration tests: patch zerver.models (and the test
    modules that re-import model classes from it) so that model names
    resolve to the historical model classes provided by the migration
    ``apps`` registry, rather than the current code's models.

    The decorated method keeps its (self, apps) signature.
    """
    def method_patched_with_mock(self: 'MigrationsTestCase', apps: StateApps) -> None:
        # All zerver models that migration tests may reference, fetched
        # once from the historical app registry.  (The previous version
        # spelled out ~40 apps.get_model calls and three literal keyword
        # lists by hand; this table drives all of them.)
        zerver_model_names = [
            'ArchivedAttachment',
            'ArchivedMessage',
            'ArchivedUserMessage',
            'Attachment',
            'BotConfigData',
            'BotStorageData',
            'Client',
            'CustomProfileField',
            'CustomProfileFieldValue',
            'DefaultStream',
            'DefaultStreamGroup',
            'EmailChangeStatus',
            'Huddle',
            'Message',
            'MultiuseInvite',
            'MutedTopic',
            'PreregistrationUser',
            'PushDeviceToken',
            'Reaction',
            'Realm',
            'RealmAuditLog',
            'RealmDomain',
            'RealmEmoji',
            'RealmFilter',
            'Recipient',
            'ScheduledEmail',
            'ScheduledMessage',
            'Service',
            'Stream',
            'Subscription',
            'UserActivity',
            'UserActivityInterval',
            'UserGroup',
            'UserGroupMembership',
            'UserHotspot',
            'UserMessage',
            'UserPresence',
            'UserProfile',
        ]
        zerver_models = {
            name: apps.get_model('zerver', name) for name in zerver_model_names
        }

        # Historical models don't carry hand-written class attributes, so
        # restore the Recipient type constants that test code relies on.
        Recipient = zerver_models['Recipient']
        Recipient.PERSONAL = 1
        Recipient.STREAM = 2
        Recipient.HUDDLE = 3

        zerver_models_patch = mock.patch.multiple(
            'zerver.models',
            **zerver_models,
        )
        # These modules import a subset of model classes directly, so the
        # same historical classes must be patched in there as well.
        zerver_test_helpers_patch = mock.patch.multiple(
            'zerver.lib.test_helpers',
            **{name: zerver_models[name] for name in [
                'Client', 'Message', 'Subscription', 'UserMessage', 'UserProfile',
            ]},
        )
        zerver_test_classes_patch = mock.patch.multiple(
            'zerver.lib.test_classes',
            **{name: zerver_models[name] for name in [
                'Client', 'Message', 'Realm', 'Recipient', 'Stream',
                'Subscription', 'UserProfile',
            ]},
        )

        with zerver_models_patch,\
                zerver_test_helpers_patch,\
                zerver_test_classes_patch:
            method(self, apps)
    return method_patched_with_mock
|
2019-08-04 20:45:24 +02:00
|
|
|
|
|
|
|
def create_dummy_file(filename: str) -> str:
    """Write a small throwaway file named `filename` under
    settings.TEST_WORKER_DIR and return its full path."""
    path = os.path.join(settings.TEST_WORKER_DIR, filename)
    with open(path, 'w') as dummy_file:
        dummy_file.write('zulip!')
    return path
|
2020-07-16 18:08:15 +02:00
|
|
|
|
|
|
|
def zulip_reaction_info() -> Dict[str, str]:
    """Return the canonical reaction payload for the :zulip: extra emoji."""
    return {
        'emoji_name': 'zulip',
        'emoji_code': 'zulip',
        'reaction_type': 'zulip_extra_emoji',
    }
|