import collections
import itertools
import os
import re
import sys
import time
from contextlib import contextmanager
from dataclasses import dataclass
from typing import (
    IO,
    TYPE_CHECKING,
    Any,
    Callable,
    Dict,
    Iterable,
    Iterator,
    List,
    Mapping,
    Optional,
    Tuple,
    TypeVar,
    Union,
    cast,
)
from unittest import mock
from unittest.mock import patch

import boto3.session
import fakeldap
import ldap
import orjson
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.db.migrations.state import StateApps
from django.http import HttpRequest, HttpResponse, HttpResponseRedirect
from django.http.request import QueryDict
from django.http.response import HttpResponseBase
from django.test import override_settings
from django.urls import URLResolver
from moto.core.decorator import mock_aws
from mypy_boto3_s3.service_resource import Bucket
from typing_extensions import ParamSpec, override

from zerver.actions.realm_settings import do_set_realm_user_default_setting
from zerver.actions.user_settings import do_change_user_setting
from zerver.lib import cache
from zerver.lib.avatar import avatar_url
from zerver.lib.cache import get_cache_backend
from zerver.lib.db import Params, Query, TimeTrackingCursor
from zerver.lib.integrations import WEBHOOK_INTEGRATIONS
from zerver.lib.per_request_cache import flush_per_request_caches
from zerver.lib.rate_limiter import RateLimitedIPAddr, rules
from zerver.lib.request import RequestNotes
from zerver.lib.upload.s3 import S3UploadBackend
from zerver.models import Client, Message, RealmUserDefault, Subscription, UserMessage, UserProfile
from zerver.models.clients import clear_client_cache, get_client
from zerver.models.realms import get_realm
from zerver.models.streams import get_stream
from zerver.tornado.handlers import AsyncDjangoHandler, allocate_handler_id
from zilencer.models import RemoteZulipServer
from zproject.backends import ExternalAuthDataDict, ExternalAuthResult

if TYPE_CHECKING:
    from django.test.client import _MonkeyPatchedWSGIResponse as TestHttpResponse

    # Avoid an import cycle; we only need these for type annotations.
    from zerver.lib.test_classes import MigrationsTestCase, ZulipTestCase


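# Variant of fakeldap.MockLDAP that re-exports the real python-ldap
# exception classes, so tests can raise and catch them against the fake
# LDAP directory.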
class MockLDAP(fakeldap.MockLDAP):
    class LDAPError(ldap.LDAPError):
        pass

    class INVALID_CREDENTIALS(ldap.INVALID_CREDENTIALS):  # noqa: N801
        pass

    class NO_SUCH_OBJECT(ldap.NO_SUCH_OBJECT):  # noqa: N801
        pass

    class ALREADY_EXISTS(ldap.ALREADY_EXISTS):  # noqa: N801
        pass


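# Stub out the Tornado event queue round trip, so that code paths which
# register an event queue and fetch user events can run in tests without
# a running Tornado server; the two arguments are returned verbatim to
# callers of the patched functions.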
@contextmanager
def stub_event_queue_user_events(
    event_queue_return: Any, user_events_return: Any
) -> Iterator[None]:
    with mock.patch("zerver.lib.events.request_event_queue", return_value=event_queue_return):
        with mock.patch("zerver.lib.events.get_user_events", return_value=user_events_return):
            yield


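# Record every cache_get/cache_get_many call made inside the block while
# still delegating to the real cache, so tests can assert on the number
# and keys of cache round trips.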
@contextmanager
def cache_tries_captured() -> Iterator[List[Tuple[str, Union[str, List[str]], Optional[str]]]]:
    cache_queries: List[Tuple[str, Union[str, List[str]], Optional[str]]] = []

    orig_get = cache.cache_get
    orig_get_many = cache.cache_get_many

    def my_cache_get(key: str, cache_name: Optional[str] = None) -> Optional[Dict[str, Any]]:
        cache_queries.append(("get", key, cache_name))
        return orig_get(key, cache_name)

    def my_cache_get_many(keys: List[str], cache_name: Optional[str] = None) -> Dict[str, Any]:
        cache_queries.append(("getmany", keys, cache_name))
        return orig_get_many(keys, cache_name)

    with mock.patch.multiple(cache, cache_get=my_cache_get, cache_get_many=my_cache_get_many):
        yield cache_queries


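# Like cache_tries_captured, but every lookup is a miss: useful for
# asserting how a code path behaves against a completely cold cache.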
@contextmanager
def simulated_empty_cache() -> Iterator[List[Tuple[str, Union[str, List[str]], Optional[str]]]]:
    cache_queries: List[Tuple[str, Union[str, List[str]], Optional[str]]] = []

    def my_cache_get(key: str, cache_name: Optional[str] = None) -> Optional[Dict[str, Any]]:
        cache_queries.append(("get", key, cache_name))
        return None

    def my_cache_get_many(
        keys: List[str], cache_name: Optional[str] = None
    ) -> Dict[str, Any]:  # nocoverage -- simulated code doesn't use this
        cache_queries.append(("getmany", keys, cache_name))
        return {}

    with mock.patch.multiple(cache, cache_get=my_cache_get, cache_get_many=my_cache_get_many):
        yield cache_queries


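# One record per SQL statement captured by queries_captured below; `time`
# is the statement's wall-clock duration in seconds, formatted with three
# decimal places.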
@dataclass
class CapturedQuery:
    sql: str
    time: str


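# A minimal usage sketch (the request and expected count are illustrative):
#
#     with queries_captured() as queries:
#         self.client_get("/json/users")
#     self.assert_length(queries, expected_count)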
@contextmanager
def queries_captured(
    include_savepoints: bool = False, keep_cache_warm: bool = False
) -> Iterator[List[CapturedQuery]]:
    """
    Allow a user to capture just the queries executed during
    the with statement.
    """

    queries: List[CapturedQuery] = []

    def cursor_execute(self: TimeTrackingCursor, sql: Query, vars: Optional[Params] = None) -> None:
        start = time.time()
        try:
            return super(TimeTrackingCursor, self).execute(sql, vars)
        finally:
            stop = time.time()
            duration = stop - start
            if include_savepoints or not isinstance(sql, str) or "SAVEPOINT" not in sql:
                queries.append(
                    CapturedQuery(
                        sql=self.mogrify(sql, vars).decode(),
                        time=f"{duration:.3f}",
                    )
                )

    def cursor_executemany(
        self: TimeTrackingCursor, sql: Query, vars_list: Iterable[Params]
    ) -> None:  # nocoverage -- doesn't actually get used in tests
        vars_list, vars_list1 = itertools.tee(vars_list)
        start = time.time()
        try:
            return super(TimeTrackingCursor, self).executemany(sql, vars_list)
        finally:
            stop = time.time()
            duration = stop - start
            queries.extend(
                CapturedQuery(
                    sql=self.mogrify(sql, vars).decode(),
                    time=f"{duration:.3f}",
                )
                for vars in vars_list1
            )

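    # By default, clear the various caches up front so that query counts
    # are measured against a cold cache; tests that want to count queries
    # with a warm cache pass keep_cache_warm=True.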
    if not keep_cache_warm:
        cache = get_cache_backend(None)
        cache.clear()
        flush_per_request_caches()
        clear_client_cache()
    with mock.patch.multiple(
        TimeTrackingCursor, execute=cursor_execute, executemany=cursor_executemany
    ):
        yield queries


@contextmanager
def stdout_suppressed() -> Iterator[IO[str]]:
    """Redirect stdout to /dev/null."""

    with open(os.devnull, "a") as devnull:
        stdout, sys.stdout = sys.stdout, devnull
        try:
            yield stdout
        finally:
            sys.stdout = stdout


def reset_email_visibility_to_everyone_in_zulip_realm() -> None:
    """
    Reset the email visibility setting for all users and for the
    RealmUserDefault object of the zulip realm in the development
    environment to "EMAIL_ADDRESS_VISIBILITY_EVERYONE", since the default
    value is "EMAIL_ADDRESS_VISIBILITY_ADMINS".  This is needed by tests
    that want the "email" field of users to be set to their real email.
    """
    realm = get_realm("zulip")
    realm_user_default = RealmUserDefault.objects.get(realm=realm)
    do_set_realm_user_default_setting(
        realm_user_default,
        "email_address_visibility",
        RealmUserDefault.EMAIL_ADDRESS_VISIBILITY_EVERYONE,
        acting_user=None,
    )
    users = UserProfile.objects.filter(realm=realm)
    for user in users:
        do_change_user_setting(
            user,
            "email_address_visibility",
            UserProfile.EMAIL_ADDRESS_VISIBILITY_EVERYONE,
            acting_user=None,
        )


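# Helpers for loading image fixtures from zerver/tests/images.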
def get_test_image_file(filename: str) -> IO[bytes]:
    test_avatar_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), "../tests/images"))
    return open(os.path.join(test_avatar_dir, filename), "rb")  # noqa: SIM115


def read_test_image_file(filename: str) -> bytes:
    with get_test_image_file(filename) as img_file:
        return img_file.read()


def avatar_disk_path(
    user_profile: UserProfile, medium: bool = False, original: bool = False
) -> str:
    avatar_url_path = avatar_url(user_profile, medium)
    assert avatar_url_path is not None
    assert settings.LOCAL_UPLOADS_DIR is not None
    assert settings.LOCAL_AVATARS_DIR is not None
    avatar_disk_path = os.path.join(
        settings.LOCAL_AVATARS_DIR,
        avatar_url_path.split("/")[-2],
        avatar_url_path.split("/")[-1].split("?")[0],
    )
    if original:
        return avatar_disk_path.replace(".png", ".original")
    return avatar_disk_path


def make_client(name: str) -> Client:
    client, _ = Client.objects.get_or_create(name=name)
    return client


def find_key_by_email(address: str) -> Optional[str]:
    from django.core.mail import outbox

    key_regex = re.compile(r"accounts/do_confirm/([a-z0-9]{24})>")
    for message in reversed(outbox):
        if address in message.to:
            match = key_regex.search(str(message.body))
            assert match is not None
            [key] = match.groups()
            return key
    return None  # nocoverage -- in theory a test might want this case, but none do


def message_stream_count(user_profile: UserProfile) -> int:
    return UserMessage.objects.select_related("message").filter(user_profile=user_profile).count()


def most_recent_usermessage(user_profile: UserProfile) -> UserMessage:
    query = (
        UserMessage.objects.select_related("message")
        .filter(user_profile=user_profile)
        .order_by("-message")
    )
    return query[0]  # Django does LIMIT here


def most_recent_message(user_profile: UserProfile) -> Message:
    usermessage = most_recent_usermessage(user_profile)
    return usermessage.message


def get_subscription(stream_name: str, user_profile: UserProfile) -> Subscription:
    stream = get_stream(stream_name, user_profile.realm)
    recipient_id = stream.recipient_id
    assert recipient_id is not None
    return Subscription.objects.get(
        user_profile=user_profile, recipient_id=recipient_id, active=True
    )


def get_user_messages(user_profile: UserProfile) -> List[Message]:
    query = (
        UserMessage.objects.select_related("message")
        .filter(user_profile=user_profile)
        .order_by("message")
    )
    return [um.message for um in query]


class DummyHandler(AsyncDjangoHandler):
    def __init__(self) -> None:
        self.handler_id = allocate_handler_id(self)


dummy_handler = DummyHandler()


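# A minimal usage sketch (the user and the view being called are
# illustrative, not part of this module):
#
#     request = HostRequestMock(post_data={"foo": "bar"}, user_profile=some_user)
#     response = some_view(request)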
class HostRequestMock(HttpRequest):
    """A mock request object where get_host() works.  Useful for testing
    routes that use Zulip's subdomains feature"""

    # The base class HttpRequest declares GET and POST as immutable
    # QueryDict objects.  The implementation of HostRequestMock
    # requires POST to be mutable, and we have some use cases that
    # modify GET, so GET and POST are both redeclared as mutable.

    GET: QueryDict  # type: ignore[assignment]  # See previous comment.
    POST: QueryDict  # type: ignore[assignment]  # See previous comment.

    def __init__(
        self,
        post_data: Mapping[str, Any] = {},
        user_profile: Union[UserProfile, None] = None,
        remote_server: Optional[RemoteZulipServer] = None,
        host: str = settings.EXTERNAL_HOST,
        client_name: Optional[str] = None,
        meta_data: Optional[Dict[str, Any]] = None,
        tornado_handler: Optional[AsyncDjangoHandler] = None,
        path: str = "",
    ) -> None:
        self.host = host
        self.GET = QueryDict(mutable=True)
        self.method = ""

        # Convert any integer parameters passed into strings, even
        # though of course the HTTP API would do so.  Ideally, we'd
        # get rid of this abstraction entirely and just use the HTTP
        # API directly, but while it exists, we need this code.
        self.POST = QueryDict(mutable=True)
        for key in post_data:
            self.POST[key] = str(post_data[key])
            self.method = "POST"

        if meta_data is None:
            self.META = {"PATH_INFO": "test"}
        else:
            self.META = meta_data
        self.path = path
        self.user = user_profile or AnonymousUser()
        self._body = orjson.dumps(post_data)
        self.content_type = ""

        RequestNotes.set_notes(
            self,
            RequestNotes(
                client_name="",
                log_data={},
                tornado_handler_id=None if tornado_handler is None else tornado_handler.handler_id,
                client=get_client(client_name) if client_name is not None else None,
                remote_server=remote_server,
            ),
        )

    @override
    def get_host(self) -> str:
        return self.host


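# URL coverage instrumentation: when the test suite is run with
# TEST_INSTRUMENT_URL_COVERAGE=TRUE, every request made through the test
# client is recorded in INSTRUMENTED_CALLS so that
# write_instrumentation_reports() can flag URL patterns no test exercised.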
INSTRUMENTING = os.environ.get("TEST_INSTRUMENT_URL_COVERAGE", "") == "TRUE"
INSTRUMENTED_CALLS: List[Dict[str, Any]] = []

UrlFuncT = TypeVar("UrlFuncT", bound=Callable[..., HttpResponseBase])  # TODO: make more specific


def append_instrumentation_data(data: Dict[str, Any]) -> None:
    INSTRUMENTED_CALLS.append(data)


def instrument_url(f: UrlFuncT) -> UrlFuncT:
    # TODO: Type this with ParamSpec to preserve the function signature.
    if not INSTRUMENTING:  # nocoverage -- option is always enabled; should we remove?
        return f
    else:

        def wrapper(
            self: "ZulipTestCase", url: str, info: object = {}, **kwargs: Union[bool, str]
        ) -> HttpResponseBase:
            start = time.time()
            result = f(self, url, info, **kwargs)
            delay = time.time() - start
            test_name = self.id()
            if "?" in url:
                url, extra_info = url.split("?", 1)
            else:
                extra_info = ""

            if isinstance(info, HostRequestMock):
                info = "<HostRequestMock>"
            elif isinstance(info, bytes):
                info = "<bytes>"
            elif isinstance(info, dict):
                info = {
                    k: "<file object>" if hasattr(v, "read") and callable(v.read) else v
                    for k, v in info.items()
                }

            append_instrumentation_data(
                dict(
                    url=url,
                    status_code=result.status_code,
                    method=f.__name__,
                    delay=delay,
                    extra_info=extra_info,
                    info=info,
                    test_name=test_name,
                    kwargs=kwargs,
                )
            )
            return result

        return cast(UrlFuncT, wrapper)  # https://github.com/python/mypy/issues/1927


def write_instrumentation_reports(full_suite: bool, include_webhooks: bool) -> None:
    if INSTRUMENTING:
        calls = INSTRUMENTED_CALLS

        from zproject.urls import urlpatterns, v1_api_and_json_patterns

        # Find our untested urls.
        pattern_cnt: Dict[str, int] = collections.defaultdict(int)

        def re_strip(r: str) -> str:
            assert r.startswith(r"^")
            if r.endswith(r"$"):
                return r[1:-1]
            else:
                assert r.endswith(r"\Z")
                return r[1:-2]

        def find_patterns(patterns: List[Any], prefixes: List[str]) -> None:
            for pattern in patterns:
                find_pattern(pattern, prefixes)

        def cleanup_url(url: str) -> str:
            if url.startswith("/"):
                url = url[1:]
            if url.startswith("http://testserver/"):
                url = url[len("http://testserver/") :]
            if url.startswith("http://zulip.testserver/"):
                url = url[len("http://zulip.testserver/") :]
            if url.startswith("http://testserver:9080/"):
                url = url[len("http://testserver:9080/") :]
            return url

        def find_pattern(pattern: Any, prefixes: List[str]) -> None:
            if isinstance(pattern, type(URLResolver)):
                return  # nocoverage -- shouldn't actually happen

            if hasattr(pattern, "url_patterns"):
                return

            canon_pattern = prefixes[0] + re_strip(pattern.pattern.regex.pattern)
            cnt = 0
            for call in calls:
                if "pattern" in call:
                    continue

                url = cleanup_url(call["url"])

                for prefix in prefixes:
                    if url.startswith(prefix):
                        match_url = url[len(prefix) :]
                        if pattern.resolve(match_url):
                            if call["status_code"] in [200, 204, 301, 302]:
                                cnt += 1
                            call["pattern"] = canon_pattern
            pattern_cnt[canon_pattern] += cnt

        find_patterns(urlpatterns, ["", "en/", "de/"])
        find_patterns(v1_api_and_json_patterns, ["api/v1/", "json/"])

        assert len(pattern_cnt) > 100
        untested_patterns = {p.replace("\\", "") for p in pattern_cnt if pattern_cnt[p] == 0}

        exempt_patterns = {
            # We exempt some patterns that are called via Tornado.
            "api/v1/events",
            "api/v1/events/internal",
            "api/v1/register",
            # We also exempt some development environment debugging
            # static content URLs, since the content they point to may
            # or may not exist.
            "coverage/(?P<path>.+)",
            "config-error/(?P<error_name>[^/]+)",
            "confirmation_key/",
            "node-coverage/(?P<path>.+)",
            "docs/",
            "docs/(?P<path>.+)",
            "casper/(?P<path>.+)",
            "static/(?P<path>.+)",
            "flush_caches",
            "external_content/(?P<digest>[^/]+)/(?P<received_url>[^/]+)",
            # Such endpoints are only used in certain test cases that can be skipped
            "testing/(?P<path>.+)",
            # These are SCIM2 urls overridden from django-scim2 to return Not Implemented.
            # We actually test them, but it's not being detected as a tested pattern,
            # possibly due to the use of re_path.  TODO: Investigate and get them
            # recognized as tested.
            "scim/v2/",
            "scim/v2/.search",
            "scim/v2/Bulk",
            "scim/v2/Me",
            "scim/v2/ResourceTypes(?:/(?P<uuid>[^/]+))?",
            "scim/v2/Schemas(?:/(?P<uuid>[^/]+))?",
            "scim/v2/ServiceProviderConfig",
            "scim/v2/Groups(?:/(?P<uuid>[^/]+))?",
            "scim/v2/Groups/.search",
            # This endpoint only returns 500 and 404 codes, so it doesn't get picked up
            # by find_pattern above and therefore needs to be exempt.
            "self-hosted-billing/not-configured/",
            *(webhook.url for webhook in WEBHOOK_INTEGRATIONS if not include_webhooks),
        }

        untested_patterns -= exempt_patterns

        var_dir = "var"  # TODO make sure path is robust here
        fn = os.path.join(var_dir, "url_coverage.txt")
        with open(fn, "wb") as f:
            for call in calls:
                f.write(orjson.dumps(call, option=orjson.OPT_APPEND_NEWLINE))

        if full_suite:
            print(f"INFO: URL coverage report is in {fn}")

        if full_suite and len(untested_patterns):  # nocoverage -- test suite error handling
            print("\nERROR: Some URLs are untested!  Here's the list of untested URLs:")
            for untested_pattern in sorted(untested_patterns):
                print(f"  {untested_pattern}")
            sys.exit(1)


def load_subdomain_token(response: Union["TestHttpResponse", HttpResponse]) -> ExternalAuthDataDict:
    assert isinstance(response, HttpResponseRedirect)
    token = response.url.rsplit("/", 1)[1]
    data = ExternalAuthResult(
        request=mock.MagicMock(), login_token=token, delete_stored_data=False
    ).data_dict
    assert data is not None
    return data


P = ParamSpec("P")


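# Decorator that runs the wrapped test against moto's mocked S3 (and
# disables the local-disk upload settings), so S3 code paths can be
# exercised without real AWS credentials.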
def use_s3_backend(method: Callable[P, None]) -> Callable[P, None]:
    @mock_aws
    @override_settings(LOCAL_UPLOADS_DIR=None)
    @override_settings(LOCAL_AVATARS_DIR=None)
    @override_settings(LOCAL_FILES_DIR=None)
    def new_method(*args: P.args, **kwargs: P.kwargs) -> None:
        with mock.patch("zerver.lib.upload.upload_backend", S3UploadBackend()):
            return method(*args, **kwargs)

    return new_method


def create_s3_buckets(*bucket_names: str) -> List[Bucket]:
    session = boto3.session.Session(settings.S3_KEY, settings.S3_SECRET_KEY)
    s3 = session.resource("s3")
    buckets = [s3.create_bucket(Bucket=name) for name in bucket_names]
    return buckets


TestCaseT = TypeVar("TestCaseT", bound="MigrationsTestCase")


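# Decorator for migration tests: re-binds the model classes used by
# zerver.models, the test helpers, and the test classes to the historical
# versions provided by the migration state (`apps`), so the test operates
# on the schema under migration rather than the current models.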
def use_db_models(
    method: Callable[[TestCaseT, StateApps], None],
) -> Callable[[TestCaseT, StateApps], None]:  # nocoverage
    def method_patched_with_mock(self: TestCaseT, apps: StateApps) -> None:
        ArchivedAttachment = apps.get_model("zerver", "ArchivedAttachment")
        ArchivedMessage = apps.get_model("zerver", "ArchivedMessage")
        ArchivedUserMessage = apps.get_model("zerver", "ArchivedUserMessage")
        Attachment = apps.get_model("zerver", "Attachment")
        BotConfigData = apps.get_model("zerver", "BotConfigData")
        BotStorageData = apps.get_model("zerver", "BotStorageData")
        Client = apps.get_model("zerver", "Client")
        CustomProfileField = apps.get_model("zerver", "CustomProfileField")
        CustomProfileFieldValue = apps.get_model("zerver", "CustomProfileFieldValue")
        DefaultStream = apps.get_model("zerver", "DefaultStream")
        DefaultStreamGroup = apps.get_model("zerver", "DefaultStreamGroup")
        EmailChangeStatus = apps.get_model("zerver", "EmailChangeStatus")
        Huddle = apps.get_model("zerver", "Huddle")
        Message = apps.get_model("zerver", "Message")
        MultiuseInvite = apps.get_model("zerver", "MultiuseInvite")
        OnboardingStep = apps.get_model("zerver", "OnboardingStep")
        PreregistrationUser = apps.get_model("zerver", "PreregistrationUser")
        PushDeviceToken = apps.get_model("zerver", "PushDeviceToken")
        Reaction = apps.get_model("zerver", "Reaction")
        Realm = apps.get_model("zerver", "Realm")
        RealmAuditLog = apps.get_model("zerver", "RealmAuditLog")
        RealmDomain = apps.get_model("zerver", "RealmDomain")
        RealmEmoji = apps.get_model("zerver", "RealmEmoji")
        RealmFilter = apps.get_model("zerver", "RealmFilter")
        Recipient = apps.get_model("zerver", "Recipient")
        Recipient.PERSONAL = 1
        Recipient.STREAM = 2
        Recipient.DIRECT_MESSAGE_GROUP = 3
        ScheduledEmail = apps.get_model("zerver", "ScheduledEmail")
        ScheduledMessage = apps.get_model("zerver", "ScheduledMessage")
        Service = apps.get_model("zerver", "Service")
        Stream = apps.get_model("zerver", "Stream")
        Subscription = apps.get_model("zerver", "Subscription")
        UserActivity = apps.get_model("zerver", "UserActivity")
        UserActivityInterval = apps.get_model("zerver", "UserActivityInterval")
        UserGroup = apps.get_model("zerver", "UserGroup")
        UserGroupMembership = apps.get_model("zerver", "UserGroupMembership")
        UserMessage = apps.get_model("zerver", "UserMessage")
        UserPresence = apps.get_model("zerver", "UserPresence")
        UserProfile = apps.get_model("zerver", "UserProfile")
        UserTopic = apps.get_model("zerver", "UserTopic")

        zerver_models_patch = mock.patch.multiple(
            "zerver.models",
            ArchivedAttachment=ArchivedAttachment,
            ArchivedMessage=ArchivedMessage,
            ArchivedUserMessage=ArchivedUserMessage,
            Attachment=Attachment,
            BotConfigData=BotConfigData,
            BotStorageData=BotStorageData,
            Client=Client,
            CustomProfileField=CustomProfileField,
            CustomProfileFieldValue=CustomProfileFieldValue,
            DefaultStream=DefaultStream,
            DefaultStreamGroup=DefaultStreamGroup,
            EmailChangeStatus=EmailChangeStatus,
            Huddle=Huddle,
            Message=Message,
            MultiuseInvite=MultiuseInvite,
            UserTopic=UserTopic,
            OnboardingStep=OnboardingStep,
            PreregistrationUser=PreregistrationUser,
            PushDeviceToken=PushDeviceToken,
            Reaction=Reaction,
            Realm=Realm,
            RealmAuditLog=RealmAuditLog,
            RealmDomain=RealmDomain,
            RealmEmoji=RealmEmoji,
            RealmFilter=RealmFilter,
            Recipient=Recipient,
            ScheduledEmail=ScheduledEmail,
            ScheduledMessage=ScheduledMessage,
            Service=Service,
            Stream=Stream,
            Subscription=Subscription,
            UserActivity=UserActivity,
            UserActivityInterval=UserActivityInterval,
            UserGroup=UserGroup,
            UserGroupMembership=UserGroupMembership,
            UserMessage=UserMessage,
            UserPresence=UserPresence,
            UserProfile=UserProfile,
        )

        zerver_test_helpers_patch = mock.patch.multiple(
            "zerver.lib.test_helpers",
            Client=Client,
            Message=Message,
            Subscription=Subscription,
            UserMessage=UserMessage,
            UserProfile=UserProfile,
        )

        zerver_test_classes_patch = mock.patch.multiple(
            "zerver.lib.test_classes",
            Client=Client,
            Message=Message,
            Realm=Realm,
            Recipient=Recipient,
            Stream=Stream,
            Subscription=Subscription,
            UserProfile=UserProfile,
        )

        with zerver_models_patch, zerver_test_helpers_patch, zerver_test_classes_patch:
            method(self, apps)

    return method_patched_with_mock


def create_dummy_file(filename: str) -> str:
    filepath = os.path.join(settings.TEST_WORKER_DIR, filename)
    with open(filepath, "w") as f:
        f.write("zulip!")
    return filepath


def zulip_reaction_info() -> Dict[str, str]:
    return dict(
        emoji_name="zulip",
        emoji_code="zulip",
        reaction_type="zulip_extra_emoji",
    )


@contextmanager
def mock_queue_publish(
    method_to_patch: str,
    **kwargs: object,
) -> Iterator[mock.MagicMock]:
    inner = mock.MagicMock(**kwargs)

    # This helper ensures that events published to the queues are
    # serializable as JSON; unserializable events would make RabbitMQ
    # crash in production.
    def verify_serialize(
        queue_name: str,
        event: Dict[str, object],
        processor: Optional[Callable[[object], None]] = None,
    ) -> None:
        marshalled_event = orjson.loads(orjson.dumps(event))
        assert marshalled_event == event
        inner(queue_name, event, processor)

    with mock.patch(method_to_patch, side_effect=verify_serialize):
        yield inner


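# A minimal usage sketch: allow at most 5 requests per second for the
# default "api_by_user" domain inside the block (the body is illustrative):
#
#     with ratelimit_rule(1, 5):
#         ...  # requests made here are subject to the temporary rule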
@contextmanager
def ratelimit_rule(
    range_seconds: int,
    num_requests: int,
    domain: str = "api_by_user",
) -> Iterator[None]:
    """Temporarily add a rate-limiting rule to the rate limiter"""
    RateLimitedIPAddr("127.0.0.1", domain=domain).clear_history()

    domain_rules = rules.get(domain, []).copy()
    domain_rules.append((range_seconds, num_requests))
    domain_rules.sort(key=lambda x: x[0])

    with patch.dict(rules, {domain: domain_rules}), override_settings(RATE_LIMITING=True):
        yield