import collections
import os
import re
import sys
import time
import weakref
from contextlib import contextmanager
from functools import wraps
from typing import (
    IO,
    TYPE_CHECKING,
    Any,
    Callable,
    Dict,
    Generator,
    Iterable,
    Iterator,
    List,
    Optional,
    Tuple,
    TypeVar,
    Union,
    cast,
)
from unittest import mock

import boto3
import fakeldap
import ldap
import orjson
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.db.migrations.state import StateApps
from django.http import HttpRequest, HttpResponse, HttpResponseRedirect
from django.http.request import QueryDict
from django.test import override_settings
from django.urls import URLResolver
from moto import mock_s3
from mypy_boto3_s3.service_resource import Bucket

import zerver.lib.upload
from zerver.lib import cache
from zerver.lib.actions import do_set_realm_property
from zerver.lib.avatar import avatar_url
from zerver.lib.cache import get_cache_backend
from zerver.lib.db import Params, ParamsT, Query, TimeTrackingCursor
from zerver.lib.integrations import WEBHOOK_INTEGRATIONS
from zerver.lib.request import RequestNotes
from zerver.lib.upload import LocalUploadBackend, S3UploadBackend
from zerver.models import (
    Client,
    Message,
    Realm,
    Subscription,
    UserMessage,
    UserProfile,
    get_client,
    get_realm,
    get_stream,
)
from zerver.tornado.handlers import AsyncDjangoHandler, allocate_handler_id
from zerver.worker import queue_processors
from zilencer.models import RemoteZulipServer
from zproject.backends import ExternalAuthDataDict, ExternalAuthResult

if TYPE_CHECKING:
    # Avoid an import cycle; we only need these for type annotations.
    from zerver.lib.test_classes import MigrationsTestCase, ZulipTestCase


class MockLDAP(fakeldap.MockLDAP):
    class LDAPError(ldap.LDAPError):
        pass

    class INVALID_CREDENTIALS(ldap.INVALID_CREDENTIALS):
        pass

    class NO_SUCH_OBJECT(ldap.NO_SUCH_OBJECT):
        pass

    class ALREADY_EXISTS(ldap.ALREADY_EXISTS):
        pass


@contextmanager
def stub_event_queue_user_events(
    event_queue_return: Any, user_events_return: Any
) -> Iterator[None]:
    with mock.patch("zerver.lib.events.request_event_queue", return_value=event_queue_return):
        with mock.patch("zerver.lib.events.get_user_events", return_value=user_events_return):
            yield
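

# Illustrative usage (a sketch; the stubbed return values are hypothetical):
#
#     with stub_event_queue_user_events({"queue_id": "1"}, [{"type": "message"}]):
#         result = self.client_post("/json/register")
#
# Inside the block, zerver.lib.events.request_event_queue and get_user_events
# return the stubbed values instead of talking to the Tornado server.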


@contextmanager
def simulated_queue_client(client: Callable[[], object]) -> Iterator[None]:
    with mock.patch.object(queue_processors, "SimpleQueueClient", client):
        yield


@contextmanager
def cache_tries_captured() -> Iterator[List[Tuple[str, Union[str, List[str]], Optional[str]]]]:
    cache_queries: List[Tuple[str, Union[str, List[str]], Optional[str]]] = []

    orig_get = cache.cache_get
    orig_get_many = cache.cache_get_many

    def my_cache_get(key: str, cache_name: Optional[str] = None) -> Optional[Dict[str, Any]]:
        cache_queries.append(("get", key, cache_name))
        return orig_get(key, cache_name)

    def my_cache_get_many(
        keys: List[str], cache_name: Optional[str] = None
    ) -> Dict[str, Any]:  # nocoverage -- simulated code doesn't use this
        cache_queries.append(("getmany", keys, cache_name))
        return orig_get_many(keys, cache_name)

    with mock.patch.multiple(cache, cache_get=my_cache_get, cache_get_many=my_cache_get_many):
        yield cache_queries
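

# Illustrative usage (a sketch; the expected count is hypothetical):
#
#     with cache_tries_captured() as cache_tries:
#         self.client_get("/json/users")
#     self.assert_length(cache_tries, 2)
#
# Each captured entry is a ("get" or "getmany", key(s), cache_name) tuple.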


@contextmanager
def simulated_empty_cache() -> Iterator[List[Tuple[str, Union[str, List[str]], Optional[str]]]]:
    cache_queries: List[Tuple[str, Union[str, List[str]], Optional[str]]] = []

    def my_cache_get(key: str, cache_name: Optional[str] = None) -> Optional[Dict[str, Any]]:
        cache_queries.append(("get", key, cache_name))
        return None

    def my_cache_get_many(
        keys: List[str], cache_name: Optional[str] = None
    ) -> Dict[str, Any]:  # nocoverage -- simulated code doesn't use this
        cache_queries.append(("getmany", keys, cache_name))
        return {}

    with mock.patch.multiple(cache, cache_get=my_cache_get, cache_get_many=my_cache_get_many):
        yield cache_queries
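

# Sketch of asserting cold-cache behavior (the callee and count are hypothetical):
#
#     with simulated_empty_cache() as cache_queries:
#         fetch_something_cached()
#     self.assert_length(cache_queries, 1)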


@contextmanager
def queries_captured(
    include_savepoints: bool = False, keep_cache_warm: bool = False
) -> Generator[List[Dict[str, Union[str, bytes]]], None, None]:
    """
    Allow a user to capture just the queries executed during
    the with statement.
    """
    queries: List[Dict[str, Union[str, bytes]]] = []

    def wrapper_execute(
        self: TimeTrackingCursor,
        action: Callable[[Query, ParamsT], None],
        sql: Query,
        params: ParamsT,
    ) -> None:
        start = time.time()
        try:
            return action(sql, params)
        finally:
            stop = time.time()
            duration = stop - start
            if include_savepoints or not isinstance(sql, str) or "SAVEPOINT" not in sql:
                queries.append(
                    {
                        "sql": self.mogrify(sql, params).decode(),
                        "time": f"{duration:.3f}",
                    }
                )

    def cursor_execute(
        self: TimeTrackingCursor, sql: Query, params: Optional[Params] = None
    ) -> None:
        return wrapper_execute(self, super(TimeTrackingCursor, self).execute, sql, params)

    def cursor_executemany(self: TimeTrackingCursor, sql: Query, params: Iterable[Params]) -> None:
        return wrapper_execute(
            self, super(TimeTrackingCursor, self).executemany, sql, params
        )  # nocoverage -- doesn't actually get used in tests

    # Clear the cache up front, so that tests count queries against a cold
    # cache by default; tests that want to count queries with a warm cache
    # pass keep_cache_warm=True.
    if not keep_cache_warm:
        cache = get_cache_backend(None)
        cache.clear()
    with mock.patch.multiple(
        TimeTrackingCursor, execute=cursor_execute, executemany=cursor_executemany
    ):
        yield queries
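

# Illustrative usage (a sketch; the query count is hypothetical):
#
#     with queries_captured() as queries:
#         self.client_get("/json/users")
#     self.assert_length(queries, 7)
#
# Pass keep_cache_warm=True to count queries without clearing the cache first.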


@contextmanager
def stdout_suppressed() -> Iterator[IO[str]]:
    """Redirect stdout to /dev/null."""

    with open(os.devnull, "a") as devnull:
        stdout, sys.stdout = sys.stdout, devnull
        yield stdout
        sys.stdout = stdout
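

# Illustrative usage (a sketch; the management command is hypothetical):
#
#     with stdout_suppressed():
#         call_command("some_noisy_command")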


def reset_emails_in_zulip_realm() -> None:
    realm = get_realm("zulip")
    do_set_realm_property(
        realm,
        "email_address_visibility",
        Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE,
        acting_user=None,
    )


def get_test_image_file(filename: str) -> IO[bytes]:
    test_avatar_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../tests/images"))
    return open(os.path.join(test_avatar_dir, filename), "rb")


def avatar_disk_path(
    user_profile: UserProfile, medium: bool = False, original: bool = False
) -> str:
    avatar_url_path = avatar_url(user_profile, medium)
    assert avatar_url_path is not None
    avatar_disk_path = os.path.join(
        settings.LOCAL_UPLOADS_DIR,
        "avatars",
        avatar_url_path.split("/")[-2],
        avatar_url_path.split("/")[-1].split("?")[0],
    )
    if original:
        return avatar_disk_path.replace(".png", ".original")
    return avatar_disk_path


def make_client(name: str) -> Client:
    client, _ = Client.objects.get_or_create(name=name)
    return client


def find_key_by_email(address: str) -> Optional[str]:
    from django.core.mail import outbox

    key_regex = re.compile("accounts/do_confirm/([a-z0-9]{24})>")
    for message in reversed(outbox):
        if address in message.to:
            match = key_regex.search(message.body)
            assert match is not None
            [key] = match.groups()
            return key
    return None  # nocoverage -- in theory a test might want this case, but none do
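

# Sketch: recovering a confirmation key after a signup email (address hypothetical):
#
#     key = find_key_by_email("newuser@zulip.com")
#     assert key is not None
#     result = self.client_get(f"/accounts/do_confirm/{key}")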


def message_stream_count(user_profile: UserProfile) -> int:
    return UserMessage.objects.select_related("message").filter(user_profile=user_profile).count()


def most_recent_usermessage(user_profile: UserProfile) -> UserMessage:
    query = (
        UserMessage.objects.select_related("message")
        .filter(user_profile=user_profile)
        .order_by("-message")
    )
    return query[0]  # Django does LIMIT here


def most_recent_message(user_profile: UserProfile) -> Message:
    usermessage = most_recent_usermessage(user_profile)
    return usermessage.message


def get_subscription(stream_name: str, user_profile: UserProfile) -> Subscription:
    stream = get_stream(stream_name, user_profile.realm)
    recipient_id = stream.recipient_id
    return Subscription.objects.get(
        user_profile=user_profile, recipient_id=recipient_id, active=True
    )


def get_user_messages(user_profile: UserProfile) -> List[Message]:
    query = (
        UserMessage.objects.select_related("message")
        .filter(user_profile=user_profile)
        .order_by("message")
    )
    return [um.message for um in query]


class DummyHandler(AsyncDjangoHandler):
    def __init__(self) -> None:
        allocate_handler_id(self)


class HostRequestMock(HttpRequest):
    """A mock request object where get_host() works. Useful for testing
    routes that use Zulip's subdomains feature"""

    def __init__(
        self,
        post_data: Dict[str, Any] = {},
        user_profile: Optional[Union[UserProfile, AnonymousUser, RemoteZulipServer]] = None,
        host: str = settings.EXTERNAL_HOST,
        client_name: Optional[str] = None,
        meta_data: Optional[Dict[str, Any]] = None,
        tornado_handler: Optional[AsyncDjangoHandler] = DummyHandler(),
        path: str = "",
    ) -> None:
        self.host = host
        self.GET = QueryDict(mutable=True)
        self.method = ""

        # Convert any integer parameters passed into strings, even
        # though of course the HTTP API would do so. Ideally, we'd
        # get rid of this abstraction entirely and just use the HTTP
        # API directly, but while it exists, we need this code.
        self.POST = QueryDict(mutable=True)
        for key in post_data:
            self.POST[key] = str(post_data[key])
            self.method = "POST"

        if meta_data is None:
            self.META = {"PATH_INFO": "test"}
        else:
            self.META = meta_data
        self.path = path
        self.user = user_profile
        self._body = b""
        self.content_type = ""
        RequestNotes.set_notes(
            self,
            RequestNotes(
                client_name="",
                log_data={},
                tornado_handler=None if tornado_handler is None else weakref.ref(tornado_handler),
                client=get_client(client_name) if client_name is not None else None,
            ),
        )

    @property
    def body(self) -> bytes:
        return super().body

    @body.setter
    def body(self, val: bytes) -> None:
        self._body = val

    def get_host(self) -> str:
        return self.host
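

# Illustrative usage (a sketch; the view and values are hypothetical):
#
#     request = HostRequestMock(
#         post_data={"stream_id": 15},
#         user_profile=hamlet,
#         host="zulip.testserver",
#     )
#     result = some_view(request)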


INSTRUMENTING = os.environ.get("TEST_INSTRUMENT_URL_COVERAGE", "") == "TRUE"
INSTRUMENTED_CALLS: List[Dict[str, Any]] = []

UrlFuncT = TypeVar("UrlFuncT", bound=Callable[..., HttpResponse])  # TODO: make more specific


def append_instrumentation_data(data: Dict[str, Any]) -> None:
    INSTRUMENTED_CALLS.append(data)


def instrument_url(f: UrlFuncT) -> UrlFuncT:
    if not INSTRUMENTING:  # nocoverage -- option is always enabled; should we remove?
        return f
    else:

        def wrapper(
            self: "ZulipTestCase", url: str, info: object = {}, **kwargs: Any
        ) -> HttpResponse:
            start = time.time()
            result = f(self, url, info, **kwargs)
            delay = time.time() - start
            test_name = self.id()
            if "?" in url:
                url, extra_info = url.split("?", 1)
            else:
                extra_info = ""

            if isinstance(info, HostRequestMock):
                info = "<HostRequestMock>"
            elif isinstance(info, bytes):
                info = "<bytes>"
            elif isinstance(info, dict):
                info = {
                    k: "<file object>" if hasattr(v, "read") and callable(getattr(v, "read")) else v
                    for k, v in info.items()
                }

            append_instrumentation_data(
                dict(
                    url=url,
                    status_code=result.status_code,
                    method=f.__name__,
                    delay=delay,
                    extra_info=extra_info,
                    info=info,
                    test_name=test_name,
                    kwargs=kwargs,
                )
            )
            return result

        return cast(UrlFuncT, wrapper)  # https://github.com/python/mypy/issues/1927


def write_instrumentation_reports(full_suite: bool, include_webhooks: bool) -> None:
    if INSTRUMENTING:
        calls = INSTRUMENTED_CALLS

        from zproject.urls import urlpatterns, v1_api_and_json_patterns

        # Find our untested urls.
        pattern_cnt: Dict[str, int] = collections.defaultdict(int)

        def re_strip(r: Any) -> str:
            return str(r).lstrip("^").rstrip("$")

        def find_patterns(patterns: List[Any], prefixes: List[str]) -> None:
            for pattern in patterns:
                find_pattern(pattern, prefixes)

        def cleanup_url(url: str) -> str:
            if url.startswith("/"):
                url = url[1:]
            if url.startswith("http://testserver/"):
                url = url[len("http://testserver/") :]
            if url.startswith("http://zulip.testserver/"):
                url = url[len("http://zulip.testserver/") :]
            if url.startswith("http://testserver:9080/"):
                url = url[len("http://testserver:9080/") :]
            return url

        def find_pattern(pattern: Any, prefixes: List[str]) -> None:

            if isinstance(pattern, type(URLResolver)):
                return  # nocoverage -- shouldn't actually happen

            if hasattr(pattern, "url_patterns"):
                return

            canon_pattern = prefixes[0] + re_strip(pattern.pattern.regex.pattern)
            cnt = 0
            for call in calls:
                if "pattern" in call:
                    continue

                url = cleanup_url(call["url"])

                for prefix in prefixes:
                    if url.startswith(prefix):
                        match_url = url[len(prefix) :]
                        if pattern.resolve(match_url):
                            if call["status_code"] in [200, 204, 301, 302]:
                                cnt += 1
                            call["pattern"] = canon_pattern
            pattern_cnt[canon_pattern] += cnt

        find_patterns(urlpatterns, ["", "en/", "de/"])
        find_patterns(v1_api_and_json_patterns, ["api/v1/", "json/"])

        assert len(pattern_cnt) > 100
        untested_patterns = {p.replace("\\", "") for p in pattern_cnt if pattern_cnt[p] == 0}

        exempt_patterns = {
            # We exempt some patterns that are called via Tornado.
            "api/v1/events",
            "api/v1/events/internal",
            "api/v1/register",
            # We also exempt some development environment debugging
            # static content URLs, since the content they point to may
            # or may not exist.
            "coverage/(?P<path>.+)",
            "confirmation_key/",
            "node-coverage/(?P<path>.+)",
            "docs/(?P<path>.+)",
            "help/add-custom-emoji",
            "help/configure-who-can-add-custom-emoji",
            "help/change-the-topic-of-a-message",
            "help/configure-missed-message-emails",
            "help/community-topic-edits",
            "help/about-streams-and-topics",
            "help/delete-a-stream",
            "help/add-an-alert-word",
            "help/change-notification-sound",
            "help/configure-message-notification-emails",
            "help/disable-new-login-emails",
            "help/test-mobile-notifications",
            "help/troubleshooting-desktop-notifications",
            "for/working-groups-and-communities/",
            "help/only-allow-admins-to-add-emoji",
            "api/delete-stream",
            "casper/(?P<path>.+)",
            "static/(?P<path>.+)",
            "flush_caches",
            "external_content/(?P<digest>[^/]+)/(?P<received_url>[^/]+)",
            *(webhook.url for webhook in WEBHOOK_INTEGRATIONS if not include_webhooks),
        }

        untested_patterns -= exempt_patterns

        var_dir = "var"  # TODO make sure path is robust here
        fn = os.path.join(var_dir, "url_coverage.txt")
        with open(fn, "wb") as f:
            for call in calls:
                f.write(orjson.dumps(call, option=orjson.OPT_APPEND_NEWLINE))

        if full_suite:
            print(f"INFO: URL coverage report is in {fn}")
            print("INFO: Try running: ./tools/create-test-api-docs")

        if full_suite and len(untested_patterns):  # nocoverage -- test suite error handling
            print("\nERROR: Some URLs are untested! Here's the list of untested URLs:")
            for untested_pattern in sorted(untested_patterns):
                print(f"   {untested_pattern}")
            sys.exit(1)


def load_subdomain_token(response: HttpResponse) -> ExternalAuthDataDict:
    assert isinstance(response, HttpResponseRedirect)
    token = response.url.rsplit("/", 1)[1]
    data = ExternalAuthResult(login_token=token, delete_stored_data=False).data_dict
    assert data is not None
    return data


FuncT = TypeVar("FuncT", bound=Callable[..., None])


def use_s3_backend(method: FuncT) -> FuncT:
    @mock_s3
    @override_settings(LOCAL_UPLOADS_DIR=None)
    def new_method(*args: Any, **kwargs: Any) -> Any:
        zerver.lib.upload.upload_backend = S3UploadBackend()
        try:
            return method(*args, **kwargs)
        finally:
            zerver.lib.upload.upload_backend = LocalUploadBackend()

    return new_method


def create_s3_buckets(*bucket_names: str) -> List[Bucket]:
    session = boto3.Session(settings.S3_KEY, settings.S3_SECRET_KEY)
    s3 = session.resource("s3")
    buckets = [s3.create_bucket(Bucket=name) for name in bucket_names]
    return buckets
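

# Sketch of combining the S3 helpers in a test (the bucket setting and test
# body are hypothetical):
#
#     @use_s3_backend
#     def test_s3_upload(self) -> None:
#         bucket = create_s3_buckets(settings.S3_AUTH_UPLOADS_BUCKET)[0]
#         ...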


def use_db_models(
    method: Callable[["MigrationsTestCase", StateApps], None]
) -> Callable[["MigrationsTestCase", StateApps], None]:  # nocoverage
    def method_patched_with_mock(self: "MigrationsTestCase", apps: StateApps) -> None:
        ArchivedAttachment = apps.get_model("zerver", "ArchivedAttachment")
        ArchivedMessage = apps.get_model("zerver", "ArchivedMessage")
        ArchivedUserMessage = apps.get_model("zerver", "ArchivedUserMessage")
        Attachment = apps.get_model("zerver", "Attachment")
        BotConfigData = apps.get_model("zerver", "BotConfigData")
        BotStorageData = apps.get_model("zerver", "BotStorageData")
        Client = apps.get_model("zerver", "Client")
        CustomProfileField = apps.get_model("zerver", "CustomProfileField")
        CustomProfileFieldValue = apps.get_model("zerver", "CustomProfileFieldValue")
        DefaultStream = apps.get_model("zerver", "DefaultStream")
        DefaultStreamGroup = apps.get_model("zerver", "DefaultStreamGroup")
        EmailChangeStatus = apps.get_model("zerver", "EmailChangeStatus")
        Huddle = apps.get_model("zerver", "Huddle")
        Message = apps.get_model("zerver", "Message")
        MultiuseInvite = apps.get_model("zerver", "MultiuseInvite")
        UserTopic = apps.get_model("zerver", "UserTopic")
        PreregistrationUser = apps.get_model("zerver", "PreregistrationUser")
        PushDeviceToken = apps.get_model("zerver", "PushDeviceToken")
        Reaction = apps.get_model("zerver", "Reaction")
        Realm = apps.get_model("zerver", "Realm")
        RealmAuditLog = apps.get_model("zerver", "RealmAuditLog")
        RealmDomain = apps.get_model("zerver", "RealmDomain")
        RealmEmoji = apps.get_model("zerver", "RealmEmoji")
        RealmFilter = apps.get_model("zerver", "RealmFilter")
        Recipient = apps.get_model("zerver", "Recipient")
        Recipient.PERSONAL = 1
        Recipient.STREAM = 2
        Recipient.HUDDLE = 3
        ScheduledEmail = apps.get_model("zerver", "ScheduledEmail")
        ScheduledMessage = apps.get_model("zerver", "ScheduledMessage")
        Service = apps.get_model("zerver", "Service")
        Stream = apps.get_model("zerver", "Stream")
        Subscription = apps.get_model("zerver", "Subscription")
        UserActivity = apps.get_model("zerver", "UserActivity")
        UserActivityInterval = apps.get_model("zerver", "UserActivityInterval")
        UserGroup = apps.get_model("zerver", "UserGroup")
        UserGroupMembership = apps.get_model("zerver", "UserGroupMembership")
        UserHotspot = apps.get_model("zerver", "UserHotspot")
        UserMessage = apps.get_model("zerver", "UserMessage")
        UserPresence = apps.get_model("zerver", "UserPresence")
        UserProfile = apps.get_model("zerver", "UserProfile")

        zerver_models_patch = mock.patch.multiple(
            "zerver.models",
            ArchivedAttachment=ArchivedAttachment,
            ArchivedMessage=ArchivedMessage,
            ArchivedUserMessage=ArchivedUserMessage,
            Attachment=Attachment,
            BotConfigData=BotConfigData,
            BotStorageData=BotStorageData,
            Client=Client,
            CustomProfileField=CustomProfileField,
            CustomProfileFieldValue=CustomProfileFieldValue,
            DefaultStream=DefaultStream,
            DefaultStreamGroup=DefaultStreamGroup,
            EmailChangeStatus=EmailChangeStatus,
            Huddle=Huddle,
            Message=Message,
            MultiuseInvite=MultiuseInvite,
            UserTopic=UserTopic,
            PreregistrationUser=PreregistrationUser,
            PushDeviceToken=PushDeviceToken,
            Reaction=Reaction,
            Realm=Realm,
            RealmAuditLog=RealmAuditLog,
            RealmDomain=RealmDomain,
            RealmEmoji=RealmEmoji,
            RealmFilter=RealmFilter,
            Recipient=Recipient,
            ScheduledEmail=ScheduledEmail,
            ScheduledMessage=ScheduledMessage,
            Service=Service,
            Stream=Stream,
            Subscription=Subscription,
            UserActivity=UserActivity,
            UserActivityInterval=UserActivityInterval,
            UserGroup=UserGroup,
            UserGroupMembership=UserGroupMembership,
            UserHotspot=UserHotspot,
            UserMessage=UserMessage,
            UserPresence=UserPresence,
            UserProfile=UserProfile,
        )
        zerver_test_helpers_patch = mock.patch.multiple(
            "zerver.lib.test_helpers",
            Client=Client,
            Message=Message,
            Subscription=Subscription,
            UserMessage=UserMessage,
            UserProfile=UserProfile,
        )

        zerver_test_classes_patch = mock.patch.multiple(
            "zerver.lib.test_classes",
            Client=Client,
            Message=Message,
            Realm=Realm,
            Recipient=Recipient,
            Stream=Stream,
            Subscription=Subscription,
            UserProfile=UserProfile,
        )

        with zerver_models_patch, zerver_test_helpers_patch, zerver_test_classes_patch:
            method(self, apps)

    return method_patched_with_mock
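

# Sketch of use in a migrations test (the test class and body are hypothetical):
#
#     class TestSomeMigration(MigrationsTestCase):
#         @use_db_models
#         def test_migrated_data(self, apps: StateApps) -> None:
#             Message = apps.get_model("zerver", "Message")
#             ...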


def create_dummy_file(filename: str) -> str:
    filepath = os.path.join(settings.TEST_WORKER_DIR, filename)
    with open(filepath, "w") as f:
        f.write("zulip!")
    return filepath


def zulip_reaction_info() -> Dict[str, str]:
    return dict(
        emoji_name="zulip",
        emoji_code="zulip",
        reaction_type="zulip_extra_emoji",
    )


@contextmanager
def mock_queue_publish(
    method_to_patch: str,
    **kwargs: object,
) -> Iterator[mock.MagicMock]:
    inner = mock.MagicMock(**kwargs)

    # This helper ensures that events published to the queues are
    # serializable as JSON; unserializable events would make RabbitMQ
    # crash in production.
    def verify_serialize(
        queue_name: str,
        event: Dict[str, object],
        processor: Optional[Callable[[object], None]] = None,
    ) -> None:
        marshalled_event = orjson.loads(orjson.dumps(event))
        assert marshalled_event == event
        inner(queue_name, event, processor)

    with mock.patch(method_to_patch, side_effect=verify_serialize):
        yield inner
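

# Illustrative usage (a sketch; the patch target should name the module where
# the queue function is looked up, e.g. "zerver.lib.actions.queue_json_publish"):
#
#     with mock_queue_publish("zerver.lib.actions.queue_json_publish") as m:
#         do_something_that_publishes_an_event()
#         m.assert_called_once()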


def patch_queue_publish(
    method_to_patch: str,
) -> Callable[[Callable[..., None]], Callable[..., None]]:
    def inner(func: Callable[..., None]) -> Callable[..., None]:
        @wraps(func)
        def _wrapped(*args: object, **kwargs: object) -> None:
            with mock_queue_publish(method_to_patch) as m:
                func(*args, m, **kwargs)

        return _wrapped

    return inner
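

# Sketch of the decorator form, which passes the mock to the test as an extra
# positional argument (the test name is hypothetical):
#
#     @patch_queue_publish("zerver.lib.actions.queue_json_publish")
#     def test_event_is_queued(self, m: mock.MagicMock) -> None:
#         do_something_that_publishes_an_event()
#         m.assert_called()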