import os
import random
import shutil
import sys
import time
import unittest
from functools import partial
from multiprocessing.sharedctypes import Synchronized
from typing import Any, Callable, Dict, Iterator, List, Optional, Set, Tuple, Type, TypeVar, Union
from unittest import loader, runner
from unittest.result import TestResult

from django.conf import settings
from django.db import ProgrammingError, connections
from django.test import TestCase
from django.test import runner as django_runner
from django.test.runner import DiscoverRunner
from django.test.signals import template_rendered
from django.urls.resolvers import URLPattern

from scripts.lib.zulip_tools import (
    TEMPLATE_DATABASE_DIR,
    get_dev_uuid_var_path,
    get_or_create_dev_uuid_var_path,
)
from zerver.lib import test_helpers
from zerver.lib.cache import bounce_key_prefix_for_testing
from zerver.lib.rate_limiter import bounce_redis_key_prefix_for_testing
from zerver.lib.sqlalchemy_utils import get_sqlalchemy_connection
from zerver.lib.test_helpers import append_instrumentation_data, write_instrumentation_reports

# We need to pick an ID for this test-backend invocation, and store it
# in this global so it can be used in init_worker; this is used to
# ensure the database IDs we select are unique for each `test-backend`
# run.  This probably should use a locking mechanism rather than the
# below hack, which fails 1/10000000 of the time.
random_id_range_start = str(random.randint(1, 10000000))

def get_database_id(worker_id: Optional[int]=None) -> str:
    if worker_id:
        return f"{random_id_range_start}_{worker_id}"
    return random_id_range_start
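
# Illustrative ID shapes (the prefix is freshly random each run;
# "4225470" below is just an assumed example value):
#
#     get_database_id()   -> "4225470"      # parent process / serial mode
#     get_database_id(2)  -> "4225470_2"    # worker 2 in parallel mode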

# The root directory for this run of the test suite.
TEST_RUN_DIR = get_or_create_dev_uuid_var_path(
    os.path.join('test-backend', f'run_{get_database_id()}'))

_worker_id = 0  # Used to identify the worker process.

ReturnT = TypeVar('ReturnT')  # Constrain return type to match

def slow(slowness_reason: str) -> Callable[[Callable[..., ReturnT]], Callable[..., ReturnT]]:
    '''
    This is a decorator that annotates a test as being "known to be
    slow."  The decorator sets slowness_reason as an attribute of the
    function.  Other code can use this annotation as needed, e.g. to
    exclude these tests in "fast" mode.
    '''
    def decorator(f: Callable[..., ReturnT]) -> Callable[..., ReturnT]:
        setattr(f, 'slowness_reason', slowness_reason)
        return f

    return decorator
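
# A minimal usage sketch (the test method here is hypothetical):
#
#     @slow("tries many rate-limiter permutations")
#     def test_rate_limiting(self) -> None:
#         ...
#
# FAST_TESTS_ONLY runs then skip such tests; see fast_tests_only() and
# run_test() below.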

def is_known_slow_test(test_method: Callable[..., ReturnT]) -> bool:
    return hasattr(test_method, 'slowness_reason')

def full_test_name(test: TestCase) -> str:
    test_module = test.__module__
    test_class = test.__class__.__name__
    test_method = test._testMethodName
    return f'{test_module}.{test_class}.{test_method}'

def get_test_method(test: TestCase) -> Callable[[], None]:
    return getattr(test, test._testMethodName)
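
# For example, a method test_foo on class BarTest in the module
# zerver.tests.test_bar (hypothetical names) yields:
#
#     full_test_name(test) -> 'zerver.tests.test_bar.BarTest.test_foo'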

# Each tuple is delay, test_name, slowness_reason
TEST_TIMINGS: List[Tuple[float, str, str]] = []

def report_slow_tests() -> None:
    timings = sorted(TEST_TIMINGS, reverse=True)
    print('SLOWNESS REPORT')
    print(' delay test')
    print(' ---- ----')
    for delay, test_name, slowness_reason in timings[:15]:
        if not slowness_reason:
            slowness_reason = 'UNKNOWN WHY SLOW, please investigate'
        print(f' {delay:0.3f} {test_name}\n    {slowness_reason}\n')

    print('...')
    for delay, test_name, slowness_reason in timings[100:]:
        if slowness_reason:
            print(f' {delay:.3f} {test_name} is not that slow')
            print('  consider removing @slow decorator')
            print(f'  This may no longer be true: {slowness_reason}')

def enforce_timely_test_completion(test_method: Callable[..., ReturnT], test_name: str,
                                   delay: float, result: unittest.TestResult) -> None:
    if hasattr(test_method, 'slowness_reason'):
        max_delay = 2.0  # seconds
    else:
        max_delay = 0.4  # seconds

    assert isinstance(result, (TextTestResult, RemoteTestResult))

    if delay > max_delay:
        msg = f'** Test is TOO slow: {test_name} ({delay:.3f} s)\n'
        result.addInfo(test_method, msg)
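
# For instance, an undecorated test taking 0.9s exceeds the 0.4s budget
# above and would produce an addInfo line like (test name hypothetical):
#
#     ** Test is TOO slow: zerver.tests.test_bar.BarTest.test_foo (0.900 s)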

def fast_tests_only() -> bool:
    return "FAST_TESTS_ONLY" in os.environ

def run_test(test: TestCase, result: TestResult) -> bool:
    failed = False
    test_method = get_test_method(test)

    if fast_tests_only() and is_known_slow_test(test_method):
        return failed

    test_name = full_test_name(test)

    bounce_key_prefix_for_testing(test_name)
    bounce_redis_key_prefix_for_testing(test_name)

    try:
        test._pre_setup()
    except Exception:
        result.addError(test, sys.exc_info())
        return True

    start_time = time.time()

    test(result)  # unittest will handle skipping, error, failure and success.

    delay = time.time() - start_time
    enforce_timely_test_completion(test_method, test_name, delay, result)
    slowness_reason = getattr(test_method, 'slowness_reason', '')
    TEST_TIMINGS.append((delay, test_name, slowness_reason))

    test._post_teardown()
    return failed

class TextTestResult(runner.TextTestResult):
    """
    This class has unpythonic function names because the base class
    follows this style.
    """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        self.failed_tests: List[str] = []

    def addInfo(self, test: TestCase, msg: str) -> None:
        self.stream.write(msg)
        self.stream.flush()

    def addInstrumentation(self, test: TestCase, data: Dict[str, Any]) -> None:
        append_instrumentation_data(data)

    def startTest(self, test: TestCase) -> None:
        TestResult.startTest(self, test)
        self.stream.writeln(f"Running {full_test_name(test)}")  # type: ignore[attr-defined]  # https://github.com/python/typeshed/issues/3139
        self.stream.flush()

    def addSuccess(self, *args: Any, **kwargs: Any) -> None:
        TestResult.addSuccess(self, *args, **kwargs)

    def addError(self, *args: Any, **kwargs: Any) -> None:
        TestResult.addError(self, *args, **kwargs)
        test_name = full_test_name(args[0])
        self.failed_tests.append(test_name)

    def addFailure(self, *args: Any, **kwargs: Any) -> None:
        TestResult.addFailure(self, *args, **kwargs)
        test_name = full_test_name(args[0])
        self.failed_tests.append(test_name)

    def addSkip(self, test: TestCase, reason: str) -> None:
        TestResult.addSkip(self, test, reason)
        self.stream.writeln("** Skipping {}: {}".format(  # type: ignore[attr-defined]  # https://github.com/python/typeshed/issues/3139
            full_test_name(test),
            reason))
        self.stream.flush()

class RemoteTestResult(django_runner.RemoteTestResult):
    """
    The class follows the unpythonic style of function names of the
    base class.
    """

    def addInfo(self, test: TestCase, msg: str) -> None:
        self.events.append(('addInfo', self.test_index, msg))

    def addInstrumentation(self, test: TestCase, data: Dict[str, Any]) -> None:
        # Some elements of data['info'] cannot be serialized.
        if 'info' in data:
            del data['info']

        self.events.append(('addInstrumentation', self.test_index, data))

def process_instrumented_calls(func: Callable[[Dict[str, Any]], None]) -> None:
    for call in test_helpers.INSTRUMENTED_CALLS:
        func(call)

SerializedSubsuite = Tuple[Type['TestSuite'], List[str]]
SubsuiteArgs = Tuple[Type['RemoteTestRunner'], int, SerializedSubsuite, bool]

def run_subsuite(args: SubsuiteArgs) -> Tuple[int, Any]:
    # Reset the accumulated INSTRUMENTED_CALLS before running this subsuite.
    test_helpers.INSTRUMENTED_CALLS = []
    # The first argument is the test runner class, but we don't need it
    # because we run our own version of the runner class.
    _, subsuite_index, subsuite, failfast = args
    runner = RemoteTestRunner(failfast=failfast)
    result = runner.run(deserialize_suite(subsuite))
    # Now we send instrumentation related events.  This data will be
    # appended to the data structure in the main thread.  For mypy, the
    # type of a partial is different from Callable.  All the methods of
    # TestResult are passed a TestCase as the first argument, but
    # addInstrumentation does not need it.
    process_instrumented_calls(partial(result.addInstrumentation, None))
    return subsuite_index, result.events
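
# A sketch of the argument tuple (values hypothetical): the runner class,
# the subsuite's index, a serialized subsuite, and the failfast flag:
#
#     run_subsuite((RemoteTestRunner, 3,
#                   (TestSuite, ['zerver.tests.test_bar.BarTest.test_foo']),
#                   False))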

def destroy_test_databases(worker_id: Optional[int]=None) -> None:
    for alias in connections:
        connection = connections[alias]
        try:
            # In the parallel mode, the test databases are created
            # through the N=self.parallel child processes, and in the
            # parent process (which calls `destroy_test_databases`),
            # `settings_dict` remains unchanged, with the original
            # template database name (zulip_test_template).  So to
            # delete the database zulip_test_template_<number>, we
            # need to pass `number` to `destroy_test_db`.
            #
            # When we run in serial mode (self.parallel=1), we don't
            # fork and thus both creation and destruction occur in the
            # same process, which means `settings_dict` has been
            # updated to have `zulip_test_template_<number>` as its
            # database name by the creation code.  As a result, to
            # delete that database, we need to not pass a number
            # argument to destroy_test_db.
            if worker_id is not None:
                database_id = get_database_id(worker_id)
                connection.creation.destroy_test_db(suffix=database_id)
            else:
                connection.creation.destroy_test_db()
        except ProgrammingError:
            # DB doesn't exist.  No need to do anything.
            pass

def create_test_databases(worker_id: int) -> None:
    database_id = get_database_id(worker_id)
    for alias in connections:
        connection = connections[alias]
        connection.creation.clone_test_db(
            suffix=database_id,
            keepdb=True,
        )

        settings_dict = connection.creation.get_test_db_clone_settings(database_id)
        # connection.settings_dict must be updated in place for changes to be
        # reflected in django.db.connections.  If the following line assigned
        # connection.settings_dict = settings_dict, new threads would connect
        # to the default database instead of the appropriate clone.
        connection.settings_dict.update(settings_dict)
        connection.close()
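
# With the illustrative prefix "4225470" from above, worker 2 clones
# zulip_test_template into a database named
# zulip_test_template_4225470_2 (Django appends the suffix to the
# template database's name).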

def init_worker(counter: Synchronized) -> None:
    """
    This function runs only under parallel mode.  It initializes the
    individual processes which are also called workers.
    """
    global _worker_id

    with counter.get_lock():
        counter.value += 1
        _worker_id = counter.value

    # You can now use _worker_id.

    # Clear the cache
    from zerver.lib.cache import get_cache_backend
    cache = get_cache_backend(None)
    cache.clear()

    # Close all connections
    connections.close_all()

    destroy_test_databases(_worker_id)
    create_test_databases(_worker_id)
    initialize_worker_path(_worker_id)

    def is_upload_avatar_url(url: URLPattern) -> bool:
        if url.pattern.regex.pattern == r'^user_avatars/(?P<path>.*)$':
            return True
        return False

    # We manually update the upload directory path in the url regex.
    from zproject import dev_urls
    found = False
    for url in dev_urls.urls:
        if is_upload_avatar_url(url):
            found = True
            new_root = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars")
            url.default_args['document_root'] = new_root

    if not found:
        print("*** Upload directory not found.")

class TestSuite(unittest.TestSuite):
    def run(self, result: TestResult, debug: bool=False) -> TestResult:
        """
        This function mostly contains the code from
        unittest.TestSuite.run.  The need to override this function
        occurred because we use run_test to run the testcase.
        """
        topLevel = False
        if getattr(result, '_testRunEntered', False) is False:
            result._testRunEntered = topLevel = True  # type: ignore[attr-defined]

        for test in self:
            # This check is correct; taken from unittest.
            if result.shouldStop:
                break

            if isinstance(test, TestSuite):
                test.run(result, debug=debug)
            else:
                self._tearDownPreviousClass(test, result)  # type: ignore[attr-defined]
                self._handleModuleFixture(test, result)  # type: ignore[attr-defined]
                self._handleClassSetUp(test, result)  # type: ignore[attr-defined]
                result._previousTestClass = test.__class__  # type: ignore[attr-defined]
                if (getattr(test.__class__, '_classSetupFailed', False) or
                        getattr(result, '_moduleSetUpFailed', False)):
                    continue

                failed = run_test(test, result)
                if failed or result.shouldStop:
                    result.shouldStop = True
                    break

        if topLevel:
            self._tearDownPreviousClass(None, result)  # type: ignore[attr-defined]
            self._handleModuleTearDown(result)  # type: ignore[attr-defined]
            result._testRunEntered = False  # type: ignore[attr-defined]
        return result

class TestLoader(loader.TestLoader):
    suiteClass = TestSuite

class ParallelTestSuite(django_runner.ParallelTestSuite):
    run_subsuite = run_subsuite
    init_worker = init_worker

    def __init__(self, suite: TestSuite, processes: int, failfast: bool) -> None:
        super().__init__(suite, processes, failfast)
        # We can't specify a consistent type for self.subsuites, since
        # the whole idea here is to monkey-patch that so we can use
        # most of django_runner.ParallelTestSuite with our own suite
        # definitions.
        assert not isinstance(self.subsuites, SubSuiteList)
        self.subsuites: Union[SubSuiteList, List[TestSuite]] = SubSuiteList(self.subsuites)

def check_import_error(test_name: str) -> None:
    try:
        # Directly using __import__ is not recommended, but here it gives
        # a clearer traceback as compared to importlib.import_module.
        __import__(test_name)
    except ImportError as exc:
        raise exc from exc  # Disable exception chaining in Python 3.

def initialize_worker_path(worker_id: int) -> None:
    # Allow each test worker process to write to a unique directory
    # within `TEST_RUN_DIR`.
    worker_path = os.path.join(TEST_RUN_DIR, f'worker_{worker_id}')
    os.makedirs(worker_path, exist_ok=True)
    settings.TEST_WORKER_DIR = worker_path

    # Every process should upload to a separate directory so that
    # race conditions can be avoided.
    settings.LOCAL_UPLOADS_DIR = get_or_create_dev_uuid_var_path(
        os.path.join("test-backend",
                     os.path.basename(TEST_RUN_DIR),
                     os.path.basename(worker_path),
                     "test_uploads"))
    settings.SENDFILE_ROOT = os.path.join(settings.LOCAL_UPLOADS_DIR, "files")
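
# Sketch of the per-worker layout this produces, reusing the
# illustrative run prefix "4225470" (paths are rooted in the dev uuid
# var directory):
#
#     .../test-backend/run_4225470/worker_2/
#     .../test-backend/run_4225470/worker_2/test_uploads/
#     .../test-backend/run_4225470/worker_2/test_uploads/files/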

class Runner(DiscoverRunner):
    test_suite = TestSuite
    test_loader = TestLoader()
    parallel_test_suite = ParallelTestSuite

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        DiscoverRunner.__init__(self, *args, **kwargs)

        # `templates_rendered` holds templates which were rendered
        # in proper logical tests.
        self.templates_rendered: Set[str] = set()
        # `shallow_tested_templates` holds templates which were rendered
        # in `zerver.tests.test_templates`.
        self.shallow_tested_templates: Set[str] = set()
        template_rendered.connect(self.on_template_rendered)

    def get_resultclass(self) -> Type[TestResult]:
        return TextTestResult

    def on_template_rendered(self, sender: Any, context: Dict[str, Any], **kwargs: Any) -> None:
        if hasattr(sender, 'template'):
            template_name = sender.template.name
            if template_name not in self.templates_rendered:
                if context.get('shallow_tested'):
                    self.shallow_tested_templates.add(template_name)
                else:
                    self.templates_rendered.add(template_name)
                    self.shallow_tested_templates.discard(template_name)

    def get_shallow_tested_templates(self) -> Set[str]:
        return self.shallow_tested_templates

    def setup_test_environment(self, *args: Any, **kwargs: Any) -> Any:
        settings.DATABASES['default']['NAME'] = settings.BACKEND_DATABASE_TEMPLATE
        # We create/destroy the test databases in run_tests to avoid
        # duplicate work when running in parallel mode.

        # Write the template database ids to a file that we can
        # reference for cleaning them up if they leak.
        filepath = os.path.join(get_dev_uuid_var_path(),
                                TEMPLATE_DATABASE_DIR,
                                get_database_id())
        os.makedirs(os.path.dirname(filepath), exist_ok=True)
        with open(filepath, "w") as f:
            if self.parallel > 1:
                for index in range(self.parallel):
                    f.write(get_database_id(index + 1) + "\n")
            else:
                f.write(get_database_id() + "\n")

        # Check if we are in serial mode to avoid unnecessarily making a directory.
        # We add "worker_0" in the path for consistency with parallel mode.
        if self.parallel == 1:
            initialize_worker_path(0)

        return super().setup_test_environment(*args, **kwargs)
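
    # With the illustrative run prefix "4225470" and self.parallel=2,
    # the ids file written above would contain:
    #
    #     4225470_1
    #     4225470_2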

    def teardown_test_environment(self, *args: Any, **kwargs: Any) -> Any:
        # The test environment setup clones the zulip_test_template
        # database, creating databases with names:
        #     'zulip_test_template_N_<worker_id>',
        # where N is `random_id_range_start`, and `worker_id` is a
        # value between <1, self.parallel>.
        #
        # We need to delete those databases to avoid leaking disk
        # (Django is smart and calls this on SIGINT too).
        if self.parallel > 1:
            for index in range(self.parallel):
                destroy_test_databases(index + 1)
        else:
            destroy_test_databases()

        # Clean up our record of which databases this process created.
        filepath = os.path.join(get_dev_uuid_var_path(),
                                TEMPLATE_DATABASE_DIR,
                                get_database_id())
        os.remove(filepath)

        # Clean up our test runs root directory.
        try:
            shutil.rmtree(TEST_RUN_DIR)
        except OSError:
            print("Unable to clean up the test run's directory.")
        return super().teardown_test_environment(*args, **kwargs)

    def test_imports(self, test_labels: List[str], suite: Union[TestSuite, ParallelTestSuite]) -> None:
        prefix_old = 'unittest.loader.ModuleImportFailure.'  # Python <= 3.4
        prefix_new = 'unittest.loader._FailedTest.'  # Python > 3.4
        error_prefixes = [prefix_old, prefix_new]
        for test_name in get_test_names(suite):
            for prefix in error_prefixes:
                if test_name.startswith(prefix):
                    test_name = test_name[len(prefix):]
                    for label in test_labels:
                        # This code block is for Python 3.5 when the test label is
                        # directly provided, for example:
                        # ./tools/test-backend zerver.tests.test_alert_words.py
                        #
                        # In this case, the test name is of this form:
                        # 'unittest.loader._FailedTest.test_alert_words'
                        #
                        # Whereas check_import_error requires test names of
                        # this form:
                        # 'unittest.loader._FailedTest.zerver.tests.test_alert_words'.
                        if test_name in label:
                            test_name = label
                            break
                    check_import_error(test_name)

    def run_tests(self, test_labels: List[str],
                  extra_tests: Optional[List[TestCase]]=None,
                  full_suite: bool=False,
                  include_webhooks: bool=False,
                  **kwargs: Any) -> Tuple[bool, List[str]]:
        self.setup_test_environment()
        try:
            suite = self.build_suite(test_labels, extra_tests)
        except AttributeError:
            # We are likely to get here only when running tests in serial
            # mode on Python 3.4 or lower.
            # test_labels are always normalized to include the correct prefix.
            # If we run the command with ./tools/test-backend test_alert_words,
            # test_labels will be equal to ['zerver.tests.test_alert_words'].
            for test_label in test_labels:
                check_import_error(test_label)

            # I think we won't reach this line under normal circumstances, but
            # for some unforeseen scenario in which the AttributeError was not
            # caused by an import error, let's re-raise the exception for
            # debugging purposes.
            raise

        self.test_imports(test_labels, suite)
        if self.parallel == 1:
            # We are running in serial mode, so create the databases here.
            # For parallel mode, the databases are created in init_worker.
            # We don't want to create and destroy the DB in setup_test_environment
            # because it will be called for both serial and parallel modes.
            # However, at this point we know in which mode we would be running
            # since that decision has already been made in build_suite().
            #
            # We pass _worker_id, which in this code path is always 0.
            destroy_test_databases(_worker_id)
            create_test_databases(_worker_id)

        # We have to do the next line to avoid flaky scenarios where we
        # run a single test and getting an SA connection causes data from
        # a Django connection to be rolled back mid-test.
        get_sqlalchemy_connection()
        result = self.run_suite(suite)
        self.teardown_test_environment()
        failed = self.suite_result(suite, result)
        if not failed:
            write_instrumentation_reports(full_suite=full_suite, include_webhooks=include_webhooks)
        return failed, result.failed_tests

def get_test_names(suite: Union[TestSuite, ParallelTestSuite]) -> List[str]:
    if isinstance(suite, ParallelTestSuite):
        # suite is ParallelTestSuite.  It will have a subsuites parameter of
        # type SubSuiteList.  Each element of a SubSuiteList is a tuple whose
        # first element is the type of TestSuite and the second element is a
        # list of test names in that test suite.  See serialize_suite() for the
        # implementation details.
        assert isinstance(suite.subsuites, SubSuiteList)
        return [name for subsuite in suite.subsuites for name in subsuite[1]]
    else:
        return [full_test_name(t) for t in get_tests_from_suite(suite)]

def get_tests_from_suite(suite: unittest.TestSuite) -> Iterator[TestCase]:
    for test in suite:
        if isinstance(test, TestSuite):
            yield from get_tests_from_suite(test)
        else:
            yield test

def serialize_suite(suite: TestSuite) -> Tuple[Type[TestSuite], List[str]]:
    return type(suite), get_test_names(suite)

def deserialize_suite(args: Tuple[Type[TestSuite], List[str]]) -> TestSuite:
    suite_class, test_names = args
    suite = suite_class()
    tests = TestLoader().loadTestsFromNames(test_names)
    for test in get_tests_from_suite(tests):
        suite.addTest(test)
    return suite
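
# serialize_suite and deserialize_suite are (up to suite structure)
# inverses; a round-trip sketch with a hypothetical test name:
#
#     serialized = serialize_suite(suite)
#     # e.g. (TestSuite, ['zerver.tests.test_bar.BarTest.test_foo'])
#     suite_again = deserialize_suite(serialized)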

class RemoteTestRunner(django_runner.RemoteTestRunner):
    resultclass = RemoteTestResult

class SubSuiteList(List[Tuple[Type[TestSuite], List[str]]]):
    """
    This class allows us to avoid changing the main logic of
    ParallelTestSuite and still make it serializable.
    """

    def __init__(self, suites: List[TestSuite]) -> None:
        serialized_suites = [serialize_suite(s) for s in suites]
        super().__init__(serialized_suites)

    def __getitem__(self, index: Any) -> Any:
        suite = super().__getitem__(index)
        return deserialize_suite(suite)
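
# Design note: items are stored in serialized (picklable) form so they
# can cross process boundaries, but __getitem__ transparently rebuilds a
# runnable TestSuite, letting ParallelTestSuite's machinery index into
# this list as if it held real suites.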