import base64
import os
import re
import shutil
import subprocess
import tempfile
from contextlib import contextmanager
from datetime import timedelta
from typing import (
    TYPE_CHECKING,
    Any,
    Callable,
    Collection,
    Dict,
    Iterable,
    Iterator,
    List,
    Mapping,
    Optional,
    Sequence,
    Set,
    Tuple,
    Union,
    cast,
)
from unittest import TestResult, mock, skipUnless
from urllib.parse import parse_qs, quote, urlencode

import lxml.html
import orjson
import responses
from django.apps import apps
from django.conf import settings
from django.core.mail import EmailMessage
from django.core.signals import got_request_exception
from django.db import connection
from django.db.migrations.executor import MigrationExecutor
from django.db.migrations.state import StateApps
from django.db.utils import IntegrityError
from django.http import HttpRequest, HttpResponse, HttpResponseBase
from django.http.response import ResponseHeaders
from django.test import Client as TestClient
from django.test import SimpleTestCase, TestCase, TransactionTestCase
from django.test.client import BOUNDARY, MULTIPART_CONTENT, ClientHandler, encode_multipart
from django.test.testcases import SerializeMixin
from django.urls import resolve
from django.utils import translation
from django.utils.module_loading import import_string
from django.utils.timezone import now as timezone_now
from django_stubs_ext import ValuesQuerySet
from fakeldap import MockLDAP
from openapi_core.contrib.django import DjangoOpenAPIRequest, DjangoOpenAPIResponse
from requests import PreparedRequest
from two_factor.plugins.phonenumber.models import PhoneDevice
from typing_extensions import override

from corporate.models import Customer, CustomerPlan, LicenseLedger
from zerver.actions.message_send import check_send_message, check_send_stream_message
from zerver.actions.realm_settings import (
    do_change_realm_permission_group_setting,
    do_set_realm_property,
)
from zerver.actions.streams import bulk_add_subscriptions, bulk_remove_subscriptions
from zerver.decorator import do_two_factor_login
from zerver.lib.cache import bounce_key_prefix_for_testing
from zerver.lib.initial_password import initial_password
from zerver.lib.message import access_message
from zerver.lib.notification_data import UserMessageNotificationsData
from zerver.lib.per_request_cache import flush_per_request_caches
from zerver.lib.redis_utils import bounce_redis_key_prefix_for_testing
from zerver.lib.sessions import get_session_dict_user
from zerver.lib.soft_deactivation import do_soft_deactivate_users
from zerver.lib.stream_subscription import get_subscribed_stream_ids_for_user
from zerver.lib.streams import (
    create_stream_if_needed,
    get_default_value_for_history_public_to_subscribers,
)
from zerver.lib.subscription_info import gather_subscriptions
from zerver.lib.test_console_output import (
    ExtraConsoleOutputFinder,
    ExtraConsoleOutputInTestError,
    tee_stderr_and_find_extra_console_output,
    tee_stdout_and_find_extra_console_output,
)
from zerver.lib.test_helpers import (
    cache_tries_captured,
    find_key_by_email,
    instrument_url,
    queries_captured,
)
from zerver.lib.topic import RESOLVED_TOPIC_PREFIX, filter_by_topic_name_via_message
from zerver.lib.user_groups import get_system_user_group_for_user
from zerver.lib.users import get_api_key
from zerver.lib.webhooks.common import (
    check_send_webhook_message,
    get_fixture_http_headers,
    standardize_headers,
)
from zerver.models import (
    Client,
    Message,
    NamedUserGroup,
    PushDeviceToken,
    Reaction,
    Realm,
    RealmEmoji,
    Recipient,
    Stream,
    Subscription,
    UserGroupMembership,
    UserMessage,
    UserProfile,
    UserStatus,
)
from zerver.models.groups import SystemGroups
from zerver.models.realms import clear_supported_auth_backends_cache, get_realm
from zerver.models.streams import get_realm_stream, get_stream
from zerver.models.users import get_system_bot, get_user, get_user_by_delivery_email
from zerver.openapi.openapi import validate_test_request, validate_test_response
from zerver.tornado.event_queue import clear_client_event_queues_for_testing

if settings.ZILENCER_ENABLED:
    from zilencer.models import RemoteZulipServer, get_remote_server_by_uuid

if TYPE_CHECKING:
    from django.test.client import _MonkeyPatchedWSGIResponse as TestHttpResponse


class EmptyResponseError(Exception):
    pass


class UploadSerializeMixin(SerializeMixin):
    """
    We cannot use override_settings to change the upload directory because
    settings.LOCAL_UPLOADS_DIR is used in URL patterns, and URLs are
    compiled only once. Otherwise, using a different upload directory
    for conflicting test cases would have provided better performance
    while providing the required isolation.
    """

    lockfile = "var/upload_lock"

    @classmethod
    @override
    def setUpClass(cls: Any) -> None:
        if not os.path.exists(cls.lockfile):
            with open(cls.lockfile, "w"):  # nocoverage - rare locking case
                pass

        super().setUpClass()
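

# ZulipClientHandler wraps Django's ClientHandler so that every request made
# through the test client is validated against Zulip's OpenAPI schema (see
# validate_test_request / validate_test_response).  OPTIONS requests, redirects
# to /login/, and requests that raised an exception are skipped, since Django
# re-raises such exceptions into the test itself.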
class ZulipClientHandler(ClientHandler):
    @override
    def get_response(self, request: HttpRequest) -> HttpResponseBase:
        got_exception = False

        def on_exception(**kwargs: object) -> None:
            nonlocal got_exception
            if kwargs["request"] is request:
                got_exception = True

        request.body  # noqa: B018 # prevents RawPostDataException
        got_request_exception.connect(on_exception)
        try:
            response = super().get_response(request)
        finally:
            got_request_exception.disconnect(on_exception)

        if (
            not got_exception  # Django will reraise this exception
            and request.method != "OPTIONS"
            and isinstance(response, HttpResponse)
            and not (
                response.status_code == 302 and response.headers["Location"].startswith("/login/")
            )
        ):
            openapi_request = DjangoOpenAPIRequest(request)
            openapi_response = DjangoOpenAPIResponse(response)
            response_validated = validate_test_response(openapi_request, openapi_response)
            if response_validated:
                validate_test_request(
                    openapi_request,
                    str(response.status_code),
                    request.META.get("intentionally_undocumented", False),
                )
        return response


class ZulipTestClient(TestClient):
    def __init__(self) -> None:
        super().__init__()
        self.handler = ZulipClientHandler(enforce_csrf_checks=False)
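

# ZulipTestCaseMixin is the shared base for Zulip's backend test cases: it
# bounces the memcached and Redis key prefixes for every test, clears event
# queues and per-request caches in tearDown, and, when BAN_CONSOLE_OUTPUT is
# enabled, fails any test that produces unexpected console output.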
class ZulipTestCaseMixin(SimpleTestCase):
    # Ensure that the test system just shows us diffs
    maxDiff: Optional[int] = None
    # This bypasses BAN_CONSOLE_OUTPUT for the test case when set.
    # Override this to verify if the given extra console output matches the
    # expectation.
    expected_console_output: Optional[str] = None
    client_class = ZulipTestClient

    @override
    def setUp(self) -> None:
        super().setUp()
        self.API_KEYS: Dict[str, str] = {}

        test_name = self.id()
        bounce_key_prefix_for_testing(test_name)
        bounce_redis_key_prefix_for_testing(test_name)

    @override
    def tearDown(self) -> None:
        super().tearDown()
        # Important: we need to clear event queues to avoid leaking data to future tests.
        clear_client_event_queues_for_testing()
        clear_supported_auth_backends_cache()
        flush_per_request_caches()
        translation.activate(settings.LANGUAGE_CODE)

        # Clean up local uploads directory after tests:
        assert settings.LOCAL_UPLOADS_DIR is not None
        if os.path.exists(settings.LOCAL_UPLOADS_DIR):
            shutil.rmtree(settings.LOCAL_UPLOADS_DIR)

        # Clean up after using fakeldap in LDAP tests:
        if hasattr(self, "mock_ldap") and hasattr(self, "mock_initialize"):
            if self.mock_ldap is not None:
                self.mock_ldap.reset()
            self.mock_initialize.stop()

    def get_user_from_email(self, email: str, realm: Realm) -> UserProfile:
        return get_user(email, realm)

    @override
    def run(self, result: Optional[TestResult] = None) -> Optional[TestResult]:  # nocoverage
        if not settings.BAN_CONSOLE_OUTPUT and self.expected_console_output is None:
            return super().run(result)
        extra_output_finder = ExtraConsoleOutputFinder()
        with tee_stderr_and_find_extra_console_output(
            extra_output_finder
        ), tee_stdout_and_find_extra_console_output(extra_output_finder):
            test_result = super().run(result)
        if extra_output_finder.full_extra_output and (
            test_result is None or test_result.wasSuccessful()
        ):
            extra_output = extra_output_finder.full_extra_output.decode(errors="replace")
            if self.expected_console_output is not None:
                self.assertEqual(extra_output, self.expected_console_output)
                return test_result

            exception_message = f"""
---- UNEXPECTED CONSOLE OUTPUT DETECTED ----

To ensure that we never miss important error output/warnings,
we require test-backend to have clean console output.

This message usually is triggered by forgotten debugging print()
statements or new logging statements.  For the latter, you can
use `with self.assertLogs()` to capture and verify the log output;
use `git grep assertLogs` to see dozens of correct examples.

You should be able to quickly reproduce this failure with:

./tools/test-backend --ban-console-output {self.id()}

Output:
{extra_output}
--------------------------------------------
"""
            raise ExtraConsoleOutputInTestError(exception_message)
        return test_result

    """
    WRAPPER_COMMENT:

    We wrap calls to self.client.{patch,put,get,post,delete} for various
    reasons.  Some of this has to do with fixing encodings before calling
    into the Django code.  Some of this has to do with providing a future
    path for instrumentation.  Some of it's just consistency.

    The linter will prevent direct calls to self.client.foo, so the wrapper
    functions have to fake out the linter by using a local variable called
    django_client to fool the regex.
    """
    DEFAULT_SUBDOMAIN = "zulip"
    TOKENIZED_NOREPLY_REGEX = settings.TOKENIZED_NOREPLY_EMAIL_ADDRESS.format(
        token=r"[a-z0-9_]{24}"
    )
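
    # set_http_headers normalizes the extra kwargs passed to the wrappers
    # below: a "subdomain" pseudo-header is translated into HTTP_HOST, and a
    # default User-Agent is filled in (a mobile one for API requests carrying
    # HTTP_AUTHORIZATION, a browser one otherwise) unless skip_user_agent is set.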
    def set_http_headers(self, extra: Dict[str, str], skip_user_agent: bool = False) -> None:
        if "subdomain" in extra:
            assert isinstance(extra["subdomain"], str)
            extra["HTTP_HOST"] = Realm.host_for_subdomain(extra["subdomain"])
            del extra["subdomain"]
        elif "HTTP_HOST" not in extra:
            extra["HTTP_HOST"] = Realm.host_for_subdomain(self.DEFAULT_SUBDOMAIN)

        # set User-Agent
        if "HTTP_AUTHORIZATION" in extra:
            # An API request; use mobile as the default user agent
            default_user_agent = "ZulipMobile/26.22.145 (iOS 10.3.1)"
        else:
            # A web app request; use a browser User-Agent string.
            default_user_agent = (
                "Mozilla/5.0 (Windows NT 10.0; Win64; x64)"
                " AppleWebKit/537.36 (KHTML, like Gecko)"
                " Chrome/79.0.3945.130 Safari/537.36"
            )
        if skip_user_agent:
            # Provide a way to disable setting User-Agent if desired.
            assert "HTTP_USER_AGENT" not in extra
        elif "HTTP_USER_AGENT" not in extra:
            extra["HTTP_USER_AGENT"] = default_user_agent

    @instrument_url
    def client_patch(
        self,
        url: str,
        info: Mapping[str, Any] = {},
        skip_user_agent: bool = False,
        follow: bool = False,
        secure: bool = False,
        intentionally_undocumented: bool = False,
        headers: Optional[Mapping[str, Any]] = None,
        **extra: str,
    ) -> "TestHttpResponse":
        """
        We need to urlencode, since Django's function won't do it for us.
        """
        encoded = urlencode(info)
        extra["content_type"] = "application/x-www-form-urlencoded"
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_headers(extra, skip_user_agent)
        return django_client.patch(
            url,
            encoded,
            follow=follow,
            secure=secure,
            headers=headers,
            intentionally_undocumented=intentionally_undocumented,
            **extra,
        )

    @instrument_url
    def client_patch_multipart(
        self,
        url: str,
        info: Mapping[str, Any] = {},
        skip_user_agent: bool = False,
        follow: bool = False,
        secure: bool = False,
        headers: Optional[Mapping[str, Any]] = None,
        intentionally_undocumented: bool = False,
        **extra: str,
    ) -> "TestHttpResponse":
        """
        Use this for patch requests that have file uploads or
        that need some sort of multi-part content.  In the future
        Django's test client may become a bit more flexible,
        so we can hopefully eliminate this.  (When you post
        with the Django test client, it deals with MULTIPART_CONTENT
        automatically, but not patch.)
        """
        encoded = encode_multipart(BOUNDARY, dict(info))
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_headers(extra, skip_user_agent)
        return django_client.patch(
            url,
            encoded,
            content_type=MULTIPART_CONTENT,
            follow=follow,
            secure=secure,
            headers=headers,
            intentionally_undocumented=intentionally_undocumented,
            **extra,
        )

    def json_patch(
        self,
        url: str,
        payload: Mapping[str, Any] = {},
        skip_user_agent: bool = False,
        follow: bool = False,
        secure: bool = False,
        **extra: str,
    ) -> "TestHttpResponse":
        data = orjson.dumps(payload)
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_headers(extra, skip_user_agent)
        return django_client.patch(
            url,
            data=data,
            content_type="application/json",
            follow=follow,
            secure=secure,
            headers=None,
            **extra,
        )

    @instrument_url
    def client_put(
        self,
        url: str,
        info: Mapping[str, Any] = {},
        skip_user_agent: bool = False,
        follow: bool = False,
        secure: bool = False,
        headers: Optional[Mapping[str, Any]] = None,
        **extra: str,
    ) -> "TestHttpResponse":
        encoded = urlencode(info)
        extra["content_type"] = "application/x-www-form-urlencoded"
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_headers(extra, skip_user_agent)
        return django_client.put(
            url, encoded, follow=follow, secure=secure, headers=headers, **extra
        )

    def json_put(
        self,
        url: str,
        payload: Mapping[str, Any] = {},
        skip_user_agent: bool = False,
        follow: bool = False,
        secure: bool = False,
        headers: Optional[Mapping[str, Any]] = None,
        **extra: str,
    ) -> "TestHttpResponse":
        data = orjson.dumps(payload)
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_headers(extra, skip_user_agent)
        return django_client.put(
            url,
            data=data,
            content_type="application/json",
            follow=follow,
            secure=secure,
            headers=headers,
            **extra,
        )

    @instrument_url
    def client_delete(
        self,
        url: str,
        info: Mapping[str, Any] = {},
        skip_user_agent: bool = False,
        follow: bool = False,
        secure: bool = False,
        headers: Optional[Mapping[str, Any]] = None,
        intentionally_undocumented: bool = False,
        **extra: str,
    ) -> "TestHttpResponse":
        encoded = urlencode(info)
        extra["content_type"] = "application/x-www-form-urlencoded"
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_headers(extra, skip_user_agent)
        return django_client.delete(
            url,
            encoded,
            follow=follow,
            secure=secure,
            headers={
                "Content-Type": "application/x-www-form-urlencoded",  # https://code.djangoproject.com/ticket/33230
                **(headers or {}),
            },
            intentionally_undocumented=intentionally_undocumented,
            **extra,
        )

    @instrument_url
    def client_options(
        self,
        url: str,
        info: Mapping[str, Any] = {},
        skip_user_agent: bool = False,
        follow: bool = False,
        secure: bool = False,
        headers: Optional[Mapping[str, Any]] = None,
        **extra: str,
    ) -> "TestHttpResponse":
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_headers(extra, skip_user_agent)
        return django_client.options(
            url, dict(info), follow=follow, secure=secure, headers=headers, **extra
        )

    @instrument_url
    def client_head(
        self,
        url: str,
        info: Mapping[str, Any] = {},
        skip_user_agent: bool = False,
        follow: bool = False,
        secure: bool = False,
        headers: Optional[Mapping[str, Any]] = None,
        **extra: str,
    ) -> "TestHttpResponse":
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_headers(extra, skip_user_agent)
        return django_client.head(url, info, follow=follow, secure=secure, headers=headers, **extra)

    @instrument_url
    def client_post(
        self,
        url: str,
        info: Union[str, bytes, Mapping[str, Any]] = {},
        skip_user_agent: bool = False,
        follow: bool = False,
        secure: bool = False,
        headers: Optional[Mapping[str, Any]] = None,
        intentionally_undocumented: bool = False,
        content_type: Optional[str] = None,
        **extra: str,
    ) -> "TestHttpResponse":
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_headers(extra, skip_user_agent)
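        # When no explicit content_type is given, form-encode plain dict
        # payloads; for str/bytes bodies or dicts containing file-like values
        # (anything with a callable .read), fall back to Django's
        # MULTIPART_CONTENT handling.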
        encoded = info
        if content_type is None:
            if isinstance(info, dict) and not any(
                hasattr(value, "read") and callable(value.read) for value in info.values()
            ):
                content_type = "application/x-www-form-urlencoded"
                encoded = urlencode(info, doseq=True)
            else:
                content_type = MULTIPART_CONTENT
        return django_client.post(
            url,
            encoded,
            follow=follow,
            secure=secure,
            headers={
                "Content-Type": content_type,  # https://code.djangoproject.com/ticket/33230
                **(headers or {}),
            },
            content_type=content_type,
            intentionally_undocumented=intentionally_undocumented,
            **extra,
        )

    @instrument_url
    def client_post_request(self, url: str, req: Any) -> "TestHttpResponse":
        """
        We simulate hitting an endpoint here, although we
        actually resolve the URL manually and hit the view
        directly.  We have this helper method to allow our
        instrumentation to work for /notify_tornado and
        future similar methods that require doing funny
        things to a request object.
        """

        match = resolve(url)
        return match.func(req)

    @instrument_url
    def client_get(
        self,
        url: str,
        info: Mapping[str, Any] = {},
        skip_user_agent: bool = False,
        follow: bool = False,
        secure: bool = False,
        headers: Optional[Mapping[str, Any]] = None,
        intentionally_undocumented: bool = False,
        **extra: str,
    ) -> "TestHttpResponse":
        django_client = self.client  # see WRAPPER_COMMENT
        self.set_http_headers(extra, skip_user_agent)
        return django_client.get(
            url,
            info,
            follow=follow,
            secure=secure,
            headers=headers,
            intentionally_undocumented=intentionally_undocumented,
            **extra,
        )
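
    # The maps below pair short test-user handles (mostly Shakespeare
    # characters) with the delivery email addresses those users have in the
    # "zulip", "zephyr", and "lear" test realms, plus emails reserved for
    # not-yet-registered users.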
    example_user_map = dict(
        hamlet="hamlet@zulip.com",
        cordelia="cordelia@zulip.com",
        iago="iago@zulip.com",
        prospero="prospero@zulip.com",
        othello="othello@zulip.com",
        AARON="AARON@zulip.com",
        aaron="aaron@zulip.com",
        ZOE="ZOE@zulip.com",
        polonius="polonius@zulip.com",
        desdemona="desdemona@zulip.com",
        shiva="shiva@zulip.com",
        webhook_bot="webhook-bot@zulip.com",
        outgoing_webhook_bot="outgoing-webhook@zulip.com",
        default_bot="default-bot@zulip.com",
    )

    mit_user_map = dict(
        sipbtest="sipbtest@mit.edu",
        starnine="starnine@mit.edu",
        espuser="espuser@mit.edu",
    )

    lear_user_map = dict(
        cordelia="cordelia@zulip.com",
        king="king@lear.org",
    )

    # Non-registered test users
    nonreg_user_map = dict(
        test="test@zulip.com",
        test1="test1@zulip.com",
        alice="alice@zulip.com",
        newuser="newuser@zulip.com",
        bob="bob@zulip.com",
        cordelia="cordelia@zulip.com",
        newguy="newguy@zulip.com",
        me="me@zulip.com",
    )

    example_user_ldap_username_map = dict(
        hamlet="hamlet",
        cordelia="cordelia",
        # aaron's uid in our test directory is "letham".
        aaron="letham",
    )

    def nonreg_user(self, name: str) -> UserProfile:
        email = self.nonreg_user_map[name]
        return get_user_by_delivery_email(email, get_realm("zulip"))

    def example_user(self, name: str) -> UserProfile:
        email = self.example_user_map[name]
        return get_user_by_delivery_email(email, get_realm("zulip"))

    def mit_user(self, name: str) -> UserProfile:
        email = self.mit_user_map[name]
        return self.get_user_from_email(email, get_realm("zephyr"))

    def lear_user(self, name: str) -> UserProfile:
        email = self.lear_user_map[name]
        return self.get_user_from_email(email, get_realm("lear"))

    def nonreg_email(self, name: str) -> str:
        return self.nonreg_user_map[name]

    def example_email(self, name: str) -> str:
        return self.example_user_map[name]

    def mit_email(self, name: str) -> str:
        return self.mit_user_map[name]

    def notification_bot(self, realm: Realm) -> UserProfile:
        return get_system_bot(settings.NOTIFICATION_BOT, realm.id)
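
    # create_test_bot and fail_to_create_test_bot both drive the /json/bots
    # endpoint while logged in as user_profile; the first asserts success and
    # returns the new bot's UserProfile, the second asserts the expected JSON
    # error message.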
    def create_test_bot(
        self, short_name: str, user_profile: UserProfile, full_name: str = "Foo Bot", **extras: Any
    ) -> UserProfile:
        self.login_user(user_profile)
        bot_info = {
            "short_name": short_name,
            "full_name": full_name,
        }
        bot_info.update(extras)
        result = self.client_post("/json/bots", bot_info)
        self.assert_json_success(result)
        bot_email = f"{short_name}-bot@zulip.testserver"
        bot_profile = self.get_user_from_email(bot_email, user_profile.realm)
        return bot_profile

    def fail_to_create_test_bot(
        self,
        short_name: str,
        user_profile: UserProfile,
        full_name: str = "Foo Bot",
        *,
        assert_json_error_msg: str,
        **extras: Any,
    ) -> None:
        self.login_user(user_profile)
        bot_info = {
            "short_name": short_name,
            "full_name": full_name,
        }
        bot_info.update(extras)
        result = self.client_post("/json/bots", bot_info)
        self.assert_json_error(result, assert_json_error_msg)

    def _get_page_params(self, result: "TestHttpResponse") -> Dict[str, Any]:
        """Helper for parsing page_params after fetching the web app's home view."""
        doc = lxml.html.document_fromstring(result.content)
        div = cast(lxml.html.HtmlMixin, doc).get_element_by_id("page-params")
        assert div is not None
        page_params_json = div.get("data-params")
        assert page_params_json is not None
        page_params = orjson.loads(page_params_json)
        return page_params

    def check_rendered_logged_in_app(self, result: "TestHttpResponse") -> None:
        """Verifies that a visit of / was a 200 that rendered page_params
        and was not served to a (logged-out) spectator."""
        self.assertEqual(result.status_code, 200)
        page_params = self._get_page_params(result)
        # It is important to check `is_spectator` to verify
        # that we treated this request as a normal logged-in session,
        # not as a spectator.
        self.assertEqual(page_params["is_spectator"], False)

    def login_with_return(
        self, email: str, password: Optional[str] = None, **extra: str
    ) -> "TestHttpResponse":
        if password is None:
            password = initial_password(email)
        result = self.client_post(
            "/accounts/login/",
            {"username": email, "password": password},
            skip_user_agent=False,
            follow=False,
            secure=False,
            headers=None,
            intentionally_undocumented=False,
            **extra,
        )
        self.assertNotEqual(result.status_code, 500)
        return result

    def login(self, name: str) -> None:
        """
        Use this for really simple tests where you just need
        to be logged in as some user, but don't need the actual
        user object for anything else.  Try to use 'hamlet' for
        non-admins and 'iago' for admins:

            self.login('hamlet')

        Try to use 'cordelia' or 'othello' as "other" users.
        """
        assert "@" not in name, "use login_by_email for email logins"
        user = self.example_user(name)
        self.login_user(user)

    def login_by_email(self, email: str, password: str) -> None:
        realm = get_realm("zulip")
        request = HttpRequest()
        request.session = self.client.session
        self.assertTrue(
            self.client.login(
                request=request,
                username=email,
                password=password,
                realm=realm,
            ),
        )

    def assert_login_failure(self, email: str, password: str) -> None:
        realm = get_realm("zulip")
        request = HttpRequest()
        request.session = self.client.session
        self.assertFalse(
            self.client.login(
                request=request,
                username=email,
                password=password,
                realm=realm,
            ),
        )

    def login_user(self, user_profile: UserProfile) -> None:
        email = user_profile.delivery_email
        realm = user_profile.realm
        password = initial_password(email)
        request = HttpRequest()
        request.session = self.client.session
        self.assertTrue(
            self.client.login(request=request, username=email, password=password, realm=realm)
        )

    def login_2fa(self, user_profile: UserProfile) -> None:
        """
        We need this function to call request.session.save().
        do_two_factor_login doesn't save the session; in a normal
        request-response cycle this doesn't matter, because the middleware
        saves the session when it finds it dirty; in tests, however, we have
        to do that explicitly.
        """
        request = HttpRequest()
        request.session = self.client.session
        request.user = user_profile
        do_two_factor_login(request, user_profile)
        request.session.save()

    def logout(self) -> None:
        self.client.logout()

    def register(self, email: str, password: str, subdomain: str = DEFAULT_SUBDOMAIN) -> None:
        response = self.client_post("/accounts/home/", {"email": email}, subdomain=subdomain)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response["Location"], f"/accounts/send_confirm/?email={quote(email)}")
        response = self.submit_reg_form_for_user(email, password, subdomain=subdomain)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response["Location"], f"http://{Realm.host_for_subdomain(subdomain)}/")

    def submit_reg_form_for_user(
        self,
        email: str,
        password: Optional[str],
        realm_name: str = "Zulip Test",
        realm_subdomain: str = "zuliptest",
        from_confirmation: str = "",
        full_name: Optional[str] = None,
        timezone: str = "",
        realm_in_root_domain: Optional[str] = None,
        default_stream_groups: Sequence[str] = [],
        source_realm_id: str = "",
        key: Optional[str] = None,
        realm_type: int = Realm.ORG_TYPES["business"]["id"],
        realm_default_language: str = "en",
        enable_marketing_emails: Optional[bool] = None,
        email_address_visibility: Optional[int] = None,
        is_demo_organization: bool = False,
        **extra: str,
    ) -> "TestHttpResponse":
        """
        Stage two of the two-step registration process.

        If things are working correctly the account should be fully
        registered after this call.

        You can pass the HTTP_HOST variable for subdomains via extra.
        """
        if full_name is None:
            full_name = email.replace("@", "_")
        payload = {
            "full_name": full_name,
            "realm_name": realm_name,
            "realm_subdomain": realm_subdomain,
            "realm_type": realm_type,
            "realm_default_language": realm_default_language,
            "key": key if key is not None else find_key_by_email(email),
            "timezone": timezone,
            "terms": True,
            "from_confirmation": from_confirmation,
            "default_stream_group": default_stream_groups,
            "source_realm_id": source_realm_id,
            "is_demo_organization": is_demo_organization,
            "how_realm_creator_found_zulip": "other",
            "how_realm_creator_found_zulip_extra_context": "I found it on the internet.",
        }
        if enable_marketing_emails is not None:
            payload["enable_marketing_emails"] = enable_marketing_emails
        if email_address_visibility is not None:
            payload["email_address_visibility"] = email_address_visibility
        if password is not None:
            payload["password"] = password
        if realm_in_root_domain is not None:
            payload["realm_in_root_domain"] = realm_in_root_domain
        return self.client_post(
            "/accounts/register/",
            payload,
            skip_user_agent=False,
            follow=False,
            secure=False,
            headers=None,
            intentionally_undocumented=False,
            **extra,
        )

    def submit_realm_creation_form(
        self,
        email: str,
        *,
        realm_subdomain: str,
        realm_name: str,
        realm_type: int = Realm.ORG_TYPES["business"]["id"],
        realm_default_language: str = "en",
        realm_in_root_domain: Optional[str] = None,
    ) -> "TestHttpResponse":
        payload = {
            "email": email,
            "realm_name": realm_name,
            "realm_type": realm_type,
            "realm_default_language": realm_default_language,
            "realm_subdomain": realm_subdomain,
        }
        if realm_in_root_domain is not None:
            payload["realm_in_root_domain"] = realm_in_root_domain
        return self.client_post(
            "/new/",
            payload,
        )
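
    # get_confirmation_url_from_outbox scans Django's test outbox (newest
    # first) for an email addressed to email_address, optionally checks the
    # subject and body, and extracts the confirmation URL with a regex that
    # defaults to matching links on settings.EXTERNAL_HOST.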
|
|
|
|
|
2020-07-05 02:14:06 +02:00
|
|
|
def get_confirmation_url_from_outbox(
|
|
|
|
self,
|
|
|
|
email_address: str,
|
|
|
|
*,
|
2021-02-12 08:19:30 +01:00
|
|
|
url_pattern: Optional[str] = None,
|
2021-05-26 19:40:12 +02:00
|
|
|
email_subject_contains: Optional[str] = None,
|
|
|
|
email_body_contains: Optional[str] = None,
|
2020-07-05 02:14:06 +02:00
|
|
|
) -> str:
|
2016-11-10 19:30:09 +01:00
|
|
|
from django.core.mail import outbox
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-10-30 22:56:14 +01:00
|
|
|
if url_pattern is None:
|
|
|
|
# This is a bit of a crude heuristic, but good enough for most tests.
|
2018-07-02 00:05:24 +02:00
|
|
|
url_pattern = settings.EXTERNAL_HOST + r"(\S+)>"
|
2016-11-10 19:30:09 +01:00
|
|
|
for message in reversed(outbox):
|
2020-06-05 23:26:35 +02:00
|
|
|
if any(
|
2021-02-12 08:19:30 +01:00
|
|
|
addr == email_address or addr.endswith(f" <{email_address}>") for addr in message.to
|
2020-06-05 23:26:35 +02:00
|
|
|
):
|
2023-05-11 02:34:34 +02:00
|
|
|
match = re.search(url_pattern, str(message.body))
|
2020-07-05 02:14:06 +02:00
|
|
|
assert match is not None
|
2021-05-26 19:40:12 +02:00
|
|
|
|
|
|
|
if email_subject_contains:
|
|
|
|
self.assertIn(email_subject_contains, message.subject)
|
|
|
|
|
|
|
|
if email_body_contains:
|
|
|
|
self.assertIn(email_body_contains, message.body)
|
|
|
|
|
2020-07-05 02:14:06 +02:00
|
|
|
[confirmation_url] = match.groups()
|
|
|
|
return confirmation_url
|
2022-12-11 10:48:05 +01:00
|
|
|
raise AssertionError("Couldn't find a confirmation email.")
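
# Illustrative usage sketch (added here for clarity; not part of the upstream
# helper class): submit_realm_creation_form and get_confirmation_url_from_outbox
# are typically chained like this in a signup test.  The email address and
# subdomain are hypothetical placeholder values.
def _example_realm_creation_flow(self) -> None:
    self.submit_realm_creation_form(
        "alice@example.com", realm_subdomain="sketch", realm_name="Sketch Realm"
    )
    # The signup email lands in Django's test outbox; pull the link out of it
    # and visit it to continue the registration flow.
    confirmation_url = self.get_confirmation_url_from_outbox("alice@example.com")
    self.client_get(confirmation_url)
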
|
2016-11-10 19:30:09 +01:00
|
|
|
|
tests: Add uuid_get and uuid_post.
We want a clean codepath for the vast majority
of cases of using api_get/api_post, which now
uses email and which we'll soon convert to
accepting `user` as a parameter.
These apis that take two different types of
values for the same parameter make sweeps
like this kinda painful, and they're pretty
easy to avoid by extracting helpers to do
the actual common tasks. So, for example,
here I still keep a common method to
actually encode the credentials (since
the whole encode/decode business is an
annoying detail that you don't want to fix
in two places):
def encode_credentials(self, identifier: str, api_key: str) -> str:
"""
identifier: Can be an email or a remote server uuid.
"""
credentials = "%s:%s" % (identifier, api_key)
return 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
But then the rest of the code has two separate
codepaths.
And for the uuid functions, we no longer have
crufty references to realm. (In fairness, realm
will also go away when we introduce users.)
For the `is_remote_server` helper, I just inlined
it, since it's now only needed in one place, and the
name didn't make total sense anyway, plus it wasn't
a super robust check. In context, it's easier
just to use a comment now to say what we're doing:
# If `role` doesn't look like an email, it might be a uuid.
if settings.ZILENCER_ENABLED and role is not None and '@' not in role:
# do stuff
2020-03-10 12:34:25 +01:00
|
|
|
def encode_uuid(self, uuid: str) -> str:
|
2017-04-27 11:41:27 +02:00
|
|
|
"""
|
|
|
|
uuid: A remote server uuid, whose API key will be looked up and encoded.
|
|
|
|
"""
|
2020-03-10 12:34:25 +01:00
|
|
|
if uuid in self.API_KEYS:
|
|
|
|
api_key = self.API_KEYS[uuid]
|
2016-10-27 23:55:31 +02:00
|
|
|
else:
|
2020-03-10 12:34:25 +01:00
|
|
|
api_key = get_remote_server_by_uuid(uuid).api_key
|
|
|
|
self.API_KEYS[uuid] = api_key
|
2016-10-27 23:55:31 +02:00
|
|
|
|
2020-03-10 12:34:25 +01:00
|
|
|
return self.encode_credentials(uuid, api_key)
|
|
|
|
|
2020-03-10 11:48:26 +01:00
|
|
|
def encode_user(self, user: UserProfile) -> str:
|
|
|
|
email = user.delivery_email
|
|
|
|
api_key = user.api_key
|
|
|
|
return self.encode_credentials(email, api_key)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def encode_email(self, email: str, realm: str = "zulip") -> str:
|
2020-03-10 11:48:26 +01:00
|
|
|
# TODO: use encode_user where possible
|
2021-02-12 08:20:45 +01:00
|
|
|
assert "@" in email
|
2020-03-10 11:48:26 +01:00
|
|
|
user = get_user_by_delivery_email(email, get_realm(realm))
|
|
|
|
api_key = get_api_key(user)
|
2020-03-10 12:34:25 +01:00
|
|
|
|
|
|
|
return self.encode_credentials(email, api_key)
|
|
|
|
|
|
|
|
def encode_credentials(self, identifier: str, api_key: str) -> str:
|
|
|
|
"""
|
|
|
|
identifier: Can be an email or a remote server uuid.
|
|
|
|
"""
|
2020-06-10 06:41:04 +02:00
|
|
|
credentials = f"{identifier}:{api_key}"
|
2021-08-02 23:20:39 +02:00
|
|
|
return "Basic " + base64.b64encode(credentials.encode()).decode()
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2021-12-17 08:14:22 +01:00
|
|
|
def uuid_get(
|
2022-10-06 11:56:48 +02:00
|
|
|
self, identifier: str, url: str, info: Mapping[str, Any] = {}, **extra: str
|
2022-06-08 04:52:09 +02:00
|
|
|
) -> "TestHttpResponse":
|
2022-06-14 22:44:49 +02:00
|
|
|
extra["HTTP_AUTHORIZATION"] = self.encode_uuid(identifier)
|
|
|
|
return self.client_get(
|
|
|
|
url,
|
|
|
|
info,
|
|
|
|
skip_user_agent=False,
|
|
|
|
follow=False,
|
|
|
|
secure=False,
|
2023-07-19 22:57:31 +02:00
|
|
|
headers=None,
|
2022-06-14 22:44:49 +02:00
|
|
|
intentionally_undocumented=False,
|
|
|
|
**extra,
|
|
|
|
)
|
2020-03-10 12:34:25 +01:00
|
|
|
|
2021-12-17 08:14:22 +01:00
|
|
|
def uuid_post(
|
|
|
|
self,
|
|
|
|
identifier: str,
|
|
|
|
url: str,
|
2022-12-04 10:45:50 +01:00
|
|
|
info: Union[str, bytes, Mapping[str, Any]] = {},
|
2022-06-14 22:44:49 +02:00
|
|
|
**extra: str,
|
2022-06-08 04:52:09 +02:00
|
|
|
) -> "TestHttpResponse":
|
2022-06-14 22:44:49 +02:00
|
|
|
extra["HTTP_AUTHORIZATION"] = self.encode_uuid(identifier)
|
|
|
|
return self.client_post(
|
|
|
|
url,
|
|
|
|
info,
|
|
|
|
skip_user_agent=False,
|
|
|
|
follow=False,
|
|
|
|
secure=False,
|
2023-07-19 22:57:31 +02:00
|
|
|
headers=None,
|
2022-06-14 22:44:49 +02:00
|
|
|
intentionally_undocumented=False,
|
|
|
|
**extra,
|
|
|
|
)
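
# Illustrative sketch (added): uuid_get/uuid_post authenticate as a remote
# Zulip server (by its uuid) rather than as a user; they are used for the
# push-notification bouncer endpoints.  The uuid, URL, and payload here are
# hypothetical placeholders, not a documented request.
def _example_uuid_post(self) -> None:
    server_uuid = "6cde5f7a-1f7e-4978-9716-49f69ebfc9fe"  # placeholder
    # The helper looks up (and caches) the server's API key, builds the Basic
    # auth header, and performs the POST; the caller then inspects the result
    # with assert_json_success()/assert_json_error() as appropriate.
    self.uuid_post(server_uuid, "/api/v1/remotes/push/register", {})
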
|
2020-03-10 12:34:25 +01:00
|
|
|
|
2021-12-17 08:14:22 +01:00
|
|
|
def api_get(
|
2022-10-06 11:56:48 +02:00
|
|
|
self, user: UserProfile, url: str, info: Mapping[str, Any] = {}, **extra: str
|
2022-06-08 04:52:09 +02:00
|
|
|
) -> "TestHttpResponse":
|
2022-06-14 22:44:49 +02:00
|
|
|
extra["HTTP_AUTHORIZATION"] = self.encode_user(user)
|
|
|
|
return self.client_get(
|
|
|
|
url,
|
|
|
|
info,
|
|
|
|
skip_user_agent=False,
|
|
|
|
follow=False,
|
|
|
|
secure=False,
|
2023-07-19 22:57:31 +02:00
|
|
|
headers=None,
|
2022-06-14 22:44:49 +02:00
|
|
|
intentionally_undocumented=False,
|
|
|
|
**extra,
|
|
|
|
)
|
2017-12-14 19:02:02 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def api_post(
|
2021-12-17 08:14:22 +01:00
|
|
|
self,
|
|
|
|
user: UserProfile,
|
|
|
|
url: str,
|
2022-12-04 10:45:50 +01:00
|
|
|
info: Union[str, bytes, Mapping[str, Any]] = {},
|
2022-06-14 22:44:49 +02:00
|
|
|
intentionally_undocumented: bool = False,
|
|
|
|
**extra: str,
|
2022-06-08 04:52:09 +02:00
|
|
|
) -> "TestHttpResponse":
|
2022-06-14 22:44:49 +02:00
|
|
|
extra["HTTP_AUTHORIZATION"] = self.encode_user(user)
|
|
|
|
return self.client_post(
|
|
|
|
url,
|
|
|
|
info,
|
|
|
|
skip_user_agent=False,
|
|
|
|
follow=False,
|
|
|
|
secure=False,
|
2023-07-19 22:57:31 +02:00
|
|
|
headers=None,
|
2022-06-14 22:44:49 +02:00
|
|
|
intentionally_undocumented=intentionally_undocumented,
|
|
|
|
**extra,
|
|
|
|
)
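
# Illustrative sketch (added): the api_* wrappers attach Basic auth for the
# given user and then defer to the client_* methods.  example_user() is the
# user-fixture helper defined elsewhere in this class; "hamlet" is one of the
# standard test users.
def _example_api_calls(self) -> None:
    hamlet = self.example_user("hamlet")
    result = self.api_get(hamlet, "/api/v1/users/me")
    self.assert_json_success(result)
    # POST endpoints work the same way; the payload is ordinary form data.
    result = self.api_post(
        hamlet,
        "/api/v1/messages",
        {"type": "private", "to": orjson.dumps([hamlet.id]).decode(), "content": "hi"},
    )
    self.assert_json_success(result)
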
|
2017-12-14 19:02:02 +01:00
|
|
|
|
2021-12-17 08:14:22 +01:00
|
|
|
def api_patch(
|
2022-10-06 11:56:48 +02:00
|
|
|
self, user: UserProfile, url: str, info: Mapping[str, Any] = {}, **extra: str
|
2022-06-08 04:52:09 +02:00
|
|
|
) -> "TestHttpResponse":
|
2022-06-14 22:44:49 +02:00
|
|
|
extra["HTTP_AUTHORIZATION"] = self.encode_user(user)
|
2021-12-17 08:14:22 +01:00
|
|
|
return self.client_patch(
|
2022-06-14 22:44:49 +02:00
|
|
|
url,
|
|
|
|
info,
|
|
|
|
skip_user_agent=False,
|
|
|
|
follow=False,
|
|
|
|
secure=False,
|
2023-07-19 22:57:31 +02:00
|
|
|
headers=None,
|
2022-06-14 22:44:49 +02:00
|
|
|
intentionally_undocumented=False,
|
|
|
|
**extra,
|
2021-12-17 08:14:22 +01:00
|
|
|
)
|
2017-12-14 19:02:02 +01:00
|
|
|
|
2021-12-17 08:14:22 +01:00
|
|
|
def api_delete(
|
2022-10-06 11:56:48 +02:00
|
|
|
self, user: UserProfile, url: str, info: Mapping[str, Any] = {}, **extra: str
|
2022-06-08 04:52:09 +02:00
|
|
|
) -> "TestHttpResponse":
|
2022-06-14 22:44:49 +02:00
|
|
|
extra["HTTP_AUTHORIZATION"] = self.encode_user(user)
|
|
|
|
return self.client_delete(
|
|
|
|
url,
|
|
|
|
info,
|
|
|
|
skip_user_agent=False,
|
|
|
|
follow=False,
|
|
|
|
secure=False,
|
2023-07-19 22:57:31 +02:00
|
|
|
headers=None,
|
2022-06-14 22:44:49 +02:00
|
|
|
intentionally_undocumented=False,
|
|
|
|
**extra,
|
|
|
|
)
|
2017-12-14 19:02:02 +01:00
|
|
|
|
2020-03-09 21:41:26 +01:00
|
|
|
def get_streams(self, user_profile: UserProfile) -> List[str]:
|
2016-11-10 19:30:09 +01:00
|
|
|
"""
|
2023-07-16 15:27:39 +02:00
|
|
|
Helper function to get the names of the streams the user is actively subscribed to
|
2016-11-10 19:30:09 +01:00
|
|
|
"""
|
2023-07-16 15:27:39 +02:00
|
|
|
|
|
|
|
return list(
|
|
|
|
Stream.objects.filter(
|
|
|
|
id__in=get_subscribed_stream_ids_for_user(user_profile)
|
|
|
|
).values_list("name", flat=True)
|
2017-10-29 17:11:11 +01:00
|
|
|
)
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def send_personal_message(
|
|
|
|
self,
|
|
|
|
from_user: UserProfile,
|
|
|
|
to_user: UserProfile,
|
|
|
|
content: str = "test content",
|
2023-12-14 07:11:00 +01:00
|
|
|
*,
|
|
|
|
read_by_sender: bool = True,
|
2021-02-12 08:19:30 +01:00
|
|
|
) -> int:
|
2020-03-08 03:30:07 +01:00
|
|
|
recipient_list = [to_user.id]
|
2023-12-14 07:11:00 +01:00
|
|
|
(sending_client, _) = Client.objects.get_or_create(name="test suite")
|
2017-10-27 19:28:02 +02:00
|
|
|
|
2023-10-09 13:11:32 +02:00
|
|
|
sent_message_result = check_send_message(
|
Revert "create_user: Use transaction.atomic decorator for do_create_user."
This reverts commit 851d68e0fc364d649175533c286c179cf38f89d6.
That commit widened how long the transaction is open, which made it
much more likely that after the user was created in the transaction,
and the memcached caches were flushed, some other request will fill
the `get_realm_user_dicts` cache with data which did not include the
new user (because it had not been committed yet).
If a user creation request lost this race, the user would, upon first
request to `/`, get a blank page and a Javascript error:
Unknown user_id in get_by_user_id: 12345
...where 12345 was their own user-id. This error would persist until
the cache expired (in 7 days) or something else expunged it.
Reverting this does not prevent the race, as the post_save hook's call
to flush_user_profile is still in a transaction (and has been since
168f241ff0a5), and thus leaves the potential race window open.
However, it much shortens the potential window of opportunity, and is
a reasonable short-term stopgap.
2023-02-18 02:44:51 +01:00
|
|
|
from_user,
|
|
|
|
sending_client,
|
|
|
|
"private",
|
|
|
|
recipient_list,
|
|
|
|
None,
|
|
|
|
content,
|
2023-12-14 07:11:00 +01:00
|
|
|
read_by_sender=read_by_sender,
|
Revert "create_user: Use transaction.atomic decorator for do_create_user."
This reverts commit 851d68e0fc364d649175533c286c179cf38f89d6.
That commit widened how long the transaction is open, which made it
much more likely that after the user was created in the transaction,
and the memcached caches were flushed, some other request will fill
the `get_realm_user_dicts` cache with data which did not include the
new user (because it had not been committed yet).
If a user creation request lost this race, the user would, upon first
request to `/`, get a blank page and a Javascript error:
Unknown user_id in get_by_user_id: 12345
...where 12345 was their own user-id. This error would persist until
the cache expired (in 7 days) or something else expunged it.
Reverting this does not prevent the race, as the post_save hook's call
to flush_user_profile is still in a transaction (and has been since
168f241ff0a5), and thus leaves the potential race window open.
However, it much shortens the potential window of opportunity, and is
a reasonable short-term stopgap.
2023-02-18 02:44:51 +01:00
|
|
|
)
|
2023-10-09 13:11:32 +02:00
|
|
|
return sent_message_result.message_id
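
# Illustrative sketch (added): sending a direct message between two standard
# test users and checking the stored row.  example_user() is the user-fixture
# helper defined elsewhere in this class.
def _example_send_personal_message(self) -> None:
    hamlet = self.example_user("hamlet")
    othello = self.example_user("othello")
    message_id = self.send_personal_message(hamlet, othello, "hello there")
    self.assertEqual(Message.objects.get(id=message_id).content, "hello there")
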
|
2017-10-27 19:28:02 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def send_huddle_message(
|
|
|
|
self,
|
|
|
|
from_user: UserProfile,
|
|
|
|
to_users: List[UserProfile],
|
|
|
|
content: str = "test content",
|
2023-12-14 07:11:00 +01:00
|
|
|
*,
|
|
|
|
read_by_sender: bool = True,
|
2021-02-12 08:19:30 +01:00
|
|
|
) -> int:
|
2020-03-08 03:30:07 +01:00
|
|
|
to_user_ids = [u.id for u in to_users]
|
2021-02-12 08:19:30 +01:00
|
|
|
assert len(to_user_ids) >= 2
|
2017-10-27 19:53:08 +02:00
|
|
|
|
2023-12-14 07:11:00 +01:00
|
|
|
(sending_client, _) = Client.objects.get_or_create(name="test suite")
|
2017-10-27 19:53:08 +02:00
|
|
|
|
2023-10-09 13:11:32 +02:00
|
|
|
sent_message_result = check_send_message(
|
Revert "create_user: Use transaction.atomic decorator for do_create_user."
This reverts commit 851d68e0fc364d649175533c286c179cf38f89d6.
That commit widened how long the transaction is open, which made it
much more likely that after the user was created in the transaction,
and the memcached caches were flushed, some other request will fill
the `get_realm_user_dicts` cache with data which did not include the
new user (because it had not been committed yet).
If a user creation request lost this race, the user would, upon first
request to `/`, get a blank page and a Javascript error:
Unknown user_id in get_by_user_id: 12345
...where 12345 was their own user-id. This error would persist until
the cache expired (in 7 days) or something else expunged it.
Reverting this does not prevent the race, as the post_save hook's call
to flush_user_profile is still in a transaction (and has been since
168f241ff0a5), and thus leaves the potential race window open.
However, it much shortens the potential window of opportunity, and is
a reasonable short-term stopgap.
2023-02-18 02:44:51 +01:00
|
|
|
from_user,
|
|
|
|
sending_client,
|
|
|
|
"private",
|
|
|
|
to_user_ids,
|
|
|
|
None,
|
|
|
|
content,
|
2023-12-14 07:11:00 +01:00
|
|
|
read_by_sender=read_by_sender,
|
Revert "create_user: Use transaction.atomic decorator for do_create_user."
This reverts commit 851d68e0fc364d649175533c286c179cf38f89d6.
That commit widened how long the transaction is open, which made it
much more likely that after the user was created in the transaction,
and the memcached caches were flushed, some other request will fill
the `get_realm_user_dicts` cache with data which did not include the
new user (because it had not been committed yet).
If a user creation request lost this race, the user would, upon first
request to `/`, get a blank page and a Javascript error:
Unknown user_id in get_by_user_id: 12345
...where 12345 was their own user-id. This error would persist until
the cache expired (in 7 days) or something else expunged it.
Reverting this does not prevent the race, as the post_save hook's call
to flush_user_profile is still in a transaction (and has been since
168f241ff0a5), and thus leaves the potential race window open.
However, it much shortens the potential window of opportunity, and is
a reasonable short-term stopgap.
2023-02-18 02:44:51 +01:00
|
|
|
)
|
2023-10-09 13:11:32 +02:00
|
|
|
return sent_message_result.message_id
|
2017-10-27 19:53:08 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def send_stream_message(
|
|
|
|
self,
|
|
|
|
sender: UserProfile,
|
|
|
|
stream_name: str,
|
|
|
|
content: str = "test content",
|
|
|
|
topic_name: str = "test",
|
|
|
|
recipient_realm: Optional[Realm] = None,
|
2023-12-14 07:11:00 +01:00
|
|
|
*,
|
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way that a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
|
|
|
allow_unsubscribed_sender: bool = False,
|
2023-12-14 07:11:00 +01:00
|
|
|
read_by_sender: bool = True,
|
2021-02-12 08:19:30 +01:00
|
|
|
) -> int:
|
2023-12-14 07:11:00 +01:00
|
|
|
(sending_client, _) = Client.objects.get_or_create(name="test suite")
|
2017-10-27 17:57:23 +02:00
|
|
|
|
Revert "create_user: Use transaction.atomic decorator for do_create_user."
This reverts commit 851d68e0fc364d649175533c286c179cf38f89d6.
That commit widened how long the transaction is open, which made it
much more likely that after the user was created in the transaction,
and the memcached caches were flushed, some other request will fill
the `get_realm_user_dicts` cache with data which did not include the
new user (because it had not been committed yet).
If a user creation request lost this race, the user would, upon first
request to `/`, get a blank page and a Javascript error:
Unknown user_id in get_by_user_id: 12345
...where 12345 was their own user-id. This error would persist until
the cache expired (in 7 days) or something else expunged it.
Reverting this does not prevent the race, as the post_save hook's call
to flush_user_profile is still in a transaction (and has been since
168f241ff0a5), and thus leaves the potential race window open.
However, it much shortens the potential window of opportunity, and is
a reasonable short-term stopgap.
2023-02-18 02:44:51 +01:00
|
|
|
message_id = check_send_stream_message(
|
|
|
|
sender=sender,
|
|
|
|
client=sending_client,
|
|
|
|
stream_name=stream_name,
|
2024-01-13 09:55:16 +01:00
|
|
|
topic_name=topic_name,
|
Revert "create_user: Use transaction.atomic decorator for do_create_user."
This reverts commit 851d68e0fc364d649175533c286c179cf38f89d6.
That commit widened how long the transaction is open, which made it
much more likely that after the user was created in the transaction,
and the memcached caches were flushed, some other request will fill
the `get_realm_user_dicts` cache with data which did not include the
new user (because it had not been committed yet).
If a user creation request lost this race, the user would, upon first
request to `/`, get a blank page and a Javascript error:
Unknown user_id in get_by_user_id: 12345
...where 12345 was their own user-id. This error would persist until
the cache expired (in 7 days) or something else expunged it.
Reverting this does not prevent the race, as the post_save hook's call
to flush_user_profile is still in a transaction (and has been since
168f241ff0a5), and thus leaves the potential race window open.
However, it much shortens the potential window of opportunity, and is
a reasonable short-term stopgap.
2023-02-18 02:44:51 +01:00
|
|
|
body=content,
|
|
|
|
realm=recipient_realm,
|
2023-12-14 07:11:00 +01:00
|
|
|
read_by_sender=read_by_sender,
|
Revert "create_user: Use transaction.atomic decorator for do_create_user."
This reverts commit 851d68e0fc364d649175533c286c179cf38f89d6.
That commit widened how long the transaction is open, which made it
much more likely that after the user was created in the transaction,
and the memcached caches were flushed, some other request will fill
the `get_realm_user_dicts` cache with data which did not include the
new user (because it had not been committed yet).
If a user creation request lost this race, the user would, upon first
request to `/`, get a blank page and a Javascript error:
Unknown user_id in get_by_user_id: 12345
...where 12345 was their own user-id. This error would persist until
the cache expired (in 7 days) or something else expunged it.
Reverting this does not prevent the race, as the post_save hook's call
to flush_user_profile is still in a transaction (and has been since
168f241ff0a5), and thus leaves the potential race window open.
However, it much shortens the potential window of opportunity, and is
a reasonable short-term stopgap.
2023-02-18 02:44:51 +01:00
|
|
|
)
|
2023-01-18 02:59:37 +01:00
|
|
|
if (
|
|
|
|
not UserMessage.objects.filter(user_profile=sender, message_id=message_id).exists()
|
|
|
|
and not sender.is_bot
|
|
|
|
and not allow_unsubscribed_sender
|
|
|
|
):
|
|
|
|
raise AssertionError(
|
|
|
|
f"""
|
2021-12-10 13:55:48 +01:00
|
|
|
It appears that the sender did not get a UserMessage row, which is
|
|
|
|
almost certainly an artificial symptom that in your test setup you
|
|
|
|
have decided to send a message to a stream without the sender being
|
|
|
|
subscribed.
|
|
|
|
|
2023-06-06 22:07:28 +02:00
|
|
|
Please do self.subscribe(<user for {sender.full_name}>, {stream_name!r}) first.
|
2021-12-10 13:55:48 +01:00
|
|
|
|
|
|
|
Or choose a stream that the user is already subscribed to:
|
|
|
|
|
|
|
|
{self.subscribed_stream_name_list(sender)}
|
|
|
|
"""
|
2023-01-18 02:59:37 +01:00
|
|
|
)
|
2021-12-10 13:55:48 +01:00
|
|
|
|
|
|
|
return message_id
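
# Illustrative sketch (added): the subscription requirement described in the
# error message above, shown in its positive form -- subscribe the sender
# first, then send.  example_user() is defined elsewhere in this class; the
# stream and topic names are hypothetical.
def _example_send_stream_message(self) -> None:
    sender = self.example_user("hamlet")
    self.subscribe(sender, "sketch stream")
    message_id = self.send_stream_message(
        sender, "sketch stream", "hello", topic_name="greetings"
    )
    self.assert_message_stream_name(Message.objects.get(id=message_id), "sketch stream")
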
|
2017-10-27 17:57:23 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def get_messages_response(
|
|
|
|
self,
|
|
|
|
anchor: Union[int, str] = 1,
|
|
|
|
num_before: int = 100,
|
|
|
|
num_after: int = 100,
|
|
|
|
use_first_unread_anchor: bool = False,
|
2022-11-11 03:32:09 +01:00
|
|
|
include_anchor: bool = True,
|
2021-02-12 08:19:30 +01:00
|
|
|
) -> Dict[str, List[Dict[str, Any]]]:
|
|
|
|
post_params = {
|
|
|
|
"anchor": anchor,
|
|
|
|
"num_before": num_before,
|
|
|
|
"num_after": num_after,
|
|
|
|
"use_first_unread_anchor": orjson.dumps(use_first_unread_anchor).decode(),
|
2022-11-11 03:32:09 +01:00
|
|
|
"include_anchor": orjson.dumps(include_anchor).decode(),
|
2021-02-12 08:19:30 +01:00
|
|
|
}
|
2016-11-10 19:30:09 +01:00
|
|
|
result = self.client_get("/json/messages", dict(post_params))
|
2017-08-17 08:46:39 +02:00
|
|
|
data = result.json()
|
2018-02-14 04:44:41 +01:00
|
|
|
return data
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def get_messages(
|
|
|
|
self,
|
|
|
|
anchor: Union[str, int] = 1,
|
|
|
|
num_before: int = 100,
|
|
|
|
num_after: int = 100,
|
|
|
|
use_first_unread_anchor: bool = False,
|
|
|
|
) -> List[Dict[str, Any]]:
|
2018-02-14 04:44:41 +01:00
|
|
|
data = self.get_messages_response(anchor, num_before, num_after, use_first_unread_anchor)
|
2021-02-12 08:20:45 +01:00
|
|
|
return data["messages"]
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
def users_subscribed_to_stream(self, stream_name: str, realm: Realm) -> List[UserProfile]:
|
2016-11-10 19:30:09 +01:00
|
|
|
stream = Stream.objects.get(name=stream_name, realm=realm)
|
|
|
|
recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
|
|
|
|
subscriptions = Subscription.objects.filter(recipient=recipient, active=True)
|
|
|
|
|
|
|
|
return [subscription.user_profile for subscription in subscriptions]
|
|
|
|
|
2023-11-07 15:49:13 +01:00
|
|
|
def not_long_term_idle_subscriber_ids(self, stream_name: str, realm: Realm) -> Set[int]:
|
|
|
|
stream = Stream.objects.get(name=stream_name, realm=realm)
|
|
|
|
recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
|
|
|
|
|
|
|
|
subscriptions = Subscription.objects.filter(
|
|
|
|
recipient=recipient, active=True, is_user_active=True
|
|
|
|
).exclude(user_profile__long_term_idle=True)
|
|
|
|
user_profile_ids = set(subscriptions.values_list("user_profile_id", flat=True))
|
|
|
|
|
|
|
|
return user_profile_ids
|
|
|
|
|
2022-06-08 04:52:09 +02:00
|
|
|
def assert_json_success(
|
2022-08-25 18:31:21 +02:00
|
|
|
self,
|
|
|
|
result: Union["TestHttpResponse", HttpResponse],
|
|
|
|
*,
|
|
|
|
ignored_parameters: Optional[List[str]] = None,
|
2022-06-08 04:52:09 +02:00
|
|
|
) -> Dict[str, Any]:
|
2016-11-10 19:30:09 +01:00
|
|
|
"""
|
|
|
|
Successful POSTs return a 200 and JSON of the form {"result": "success",
|
|
|
|
"msg": ""}.
|
|
|
|
"""
|
2017-08-29 06:33:10 +02:00
|
|
|
try:
|
2020-08-07 01:09:47 +02:00
|
|
|
json = orjson.loads(result.content)
|
2020-10-09 02:17:33 +02:00
|
|
|
except orjson.JSONDecodeError: # nocoverage
|
2021-02-12 08:20:45 +01:00
|
|
|
json = {"msg": "Error parsing JSON in response!"}
|
|
|
|
self.assertEqual(result.status_code, 200, json["msg"])
|
2016-11-10 19:30:09 +01:00
|
|
|
self.assertEqual(json.get("result"), "success")
|
|
|
|
# We have a msg key for consistency with errors, but it typically has an
|
|
|
|
# empty value.
|
|
|
|
self.assertIn("msg", json)
|
2017-08-29 06:33:10 +02:00
|
|
|
self.assertNotEqual(json["msg"], "Error parsing JSON in response!")
|
2022-08-25 18:31:21 +02:00
|
|
|
# Check ignored parameters.
|
|
|
|
if ignored_parameters is None:
|
|
|
|
self.assertNotIn("ignored_parameters_unsupported", json)
|
|
|
|
else:
|
|
|
|
self.assertIn("ignored_parameters_unsupported", json)
|
|
|
|
self.assert_length(json["ignored_parameters_unsupported"], len(ignored_parameters))
|
|
|
|
for param in ignored_parameters:
|
|
|
|
self.assertTrue(param in json["ignored_parameters_unsupported"])
|
2016-11-10 19:30:09 +01:00
|
|
|
return json
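
# Illustrative sketch (added): assert_json_success returns the decoded payload,
# so follow-up assertions can be chained directly.  example_user() is defined
# elsewhere in this class.
def _example_assert_json_success(self) -> None:
    hamlet = self.example_user("hamlet")
    result = self.api_get(hamlet, "/api/v1/users/me")
    data = self.assert_json_success(result)
    self.assertIn("user_id", data)
    # For a request that is expected to fail, one would instead write, e.g.:
    #     self.assert_json_error(result, "Invalid stream ID", status_code=400)
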
|
|
|
|
|
2022-06-08 04:52:09 +02:00
|
|
|
def get_json_error(self, result: "TestHttpResponse", status_code: int = 400) -> str:
|
2017-08-29 06:33:10 +02:00
|
|
|
try:
|
2020-08-07 01:09:47 +02:00
|
|
|
json = orjson.loads(result.content)
|
2020-10-09 02:17:33 +02:00
|
|
|
except orjson.JSONDecodeError: # nocoverage
|
2021-02-12 08:20:45 +01:00
|
|
|
json = {"msg": "Error parsing JSON in response!"}
|
|
|
|
self.assertEqual(result.status_code, status_code, msg=json.get("msg"))
|
2016-11-10 19:30:09 +01:00
|
|
|
self.assertEqual(json.get("result"), "error")
|
2021-02-12 08:20:45 +01:00
|
|
|
return json["msg"]
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2022-06-08 04:52:09 +02:00
|
|
|
def assert_json_error(
|
|
|
|
self, result: "TestHttpResponse", msg: str, status_code: int = 400
|
|
|
|
) -> None:
|
2016-11-10 19:30:09 +01:00
|
|
|
"""
|
|
|
|
Invalid POSTs return an error status code and JSON of the form
|
|
|
|
{"result": "error", "msg": "reason"}.
|
|
|
|
"""
|
|
|
|
self.assertEqual(self.get_json_error(result, status_code=status_code), msg)
|
|
|
|
|
2024-04-17 05:28:33 +02:00
|
|
|
def assert_length(self, items: Collection[Any] | ValuesQuerySet[Any, Any], count: int) -> None:
|
2017-10-06 23:28:22 +02:00
|
|
|
actual_count = len(items)
|
|
|
|
if actual_count != count: # nocoverage
|
2021-05-17 05:39:37 +02:00
|
|
|
print("\nITEMS:\n")
|
2017-10-06 23:28:22 +02:00
|
|
|
for item in items:
|
|
|
|
print(item)
|
2020-06-10 06:41:04 +02:00
|
|
|
print(f"\nexpected length: {count}\nactual length: {actual_count}")
|
2023-05-27 02:30:33 +02:00
|
|
|
raise AssertionError(f"{type(items)} is of unexpected size!")
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2023-08-11 15:28:22 +02:00
|
|
|
@contextmanager
|
|
|
|
def assert_memcached_count(self, count: int) -> Iterator[None]:
|
|
|
|
with cache_tries_captured() as cache_tries:
|
|
|
|
yield
|
|
|
|
self.assert_length(cache_tries, count)
|
|
|
|
|
2022-10-15 22:47:40 +02:00
|
|
|
@contextmanager
|
|
|
|
def assert_database_query_count(
|
|
|
|
self, count: int, include_savepoints: bool = False, keep_cache_warm: bool = False
|
|
|
|
) -> Iterator[None]:
|
|
|
|
"""
|
|
|
|
This captures the queries executed and checks the total number of queries.
|
|
|
|
Useful when minimizing unnecessary roundtrips to the database is important.
|
|
|
|
"""
|
|
|
|
with queries_captured(
|
|
|
|
include_savepoints=include_savepoints, keep_cache_warm=keep_cache_warm
|
|
|
|
) as queries:
|
|
|
|
yield
|
|
|
|
actual_count = len(queries)
|
|
|
|
if actual_count != count: # nocoverage
|
|
|
|
print("\nITEMS:\n")
|
|
|
|
for index, query in enumerate(queries):
|
2023-06-06 22:07:28 +02:00
|
|
|
print(f"#{index + 1}\nsql: {query.sql}\ntime: {query.time}\n")
|
2022-10-15 22:47:40 +02:00
|
|
|
print(f"expected count: {count}\nactual count: {actual_count}")
|
|
|
|
raise AssertionError(
|
|
|
|
f"""
|
|
|
|
{count} queries expected, got {actual_count}.
|
|
|
|
This is a performance-critical code path, where we check
|
|
|
|
the number of database queries used in order to avoid accidental regressions.
|
2022-10-24 08:04:32 +02:00
|
|
|
If an unnecessary query was removed or the new query is necessary, you should
|
|
|
|
update this test, and explain what queries we added/removed in the pull request
|
|
|
|
and why any new queries can't be avoided."""
|
2022-10-15 22:47:40 +02:00
|
|
|
)
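
# Illustrative sketch (added): pinning the query count for a request.  The
# count of 10 is a made-up illustration; a real test records the exact number
# the endpoint currently needs, so regressions show up as a diff here.
def _example_assert_database_query_count(self) -> None:
    hamlet = self.example_user("hamlet")
    with self.assert_database_query_count(10):
        self.api_get(hamlet, "/api/v1/users/me")
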
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def assert_json_error_contains(
|
2022-06-08 04:52:09 +02:00
|
|
|
self, result: "TestHttpResponse", msg_substring: str, status_code: int = 400
|
2021-02-12 08:19:30 +01:00
|
|
|
) -> None:
|
2016-11-10 19:30:09 +01:00
|
|
|
self.assertIn(msg_substring, self.get_json_error(result, status_code=status_code))
|
|
|
|
|
2022-06-08 04:52:09 +02:00
|
|
|
def assert_in_response(
|
|
|
|
self, substring: str, response: Union["TestHttpResponse", HttpResponse]
|
|
|
|
) -> None:
|
2021-08-02 23:20:39 +02:00
|
|
|
self.assertIn(substring, response.content.decode())
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2022-06-08 04:52:09 +02:00
|
|
|
def assert_in_success_response(
|
2022-08-15 23:22:34 +02:00
|
|
|
self, substrings: List[str], response: Union["TestHttpResponse", HttpResponse]
|
2022-06-08 04:52:09 +02:00
|
|
|
) -> None:
|
2016-11-19 21:54:00 +01:00
|
|
|
self.assertEqual(response.status_code, 200)
|
2021-08-02 23:20:39 +02:00
|
|
|
decoded = response.content.decode()
|
2016-11-19 21:54:00 +01:00
|
|
|
for substring in substrings:
|
|
|
|
self.assertIn(substring, decoded)
|
|
|
|
|
2022-06-08 04:52:09 +02:00
|
|
|
def assert_not_in_success_response(
|
2022-08-15 23:22:34 +02:00
|
|
|
self, substrings: List[str], response: Union["TestHttpResponse", HttpResponse]
|
2022-06-08 04:52:09 +02:00
|
|
|
) -> None:
|
2017-04-10 12:35:56 +02:00
|
|
|
self.assertEqual(response.status_code, 200)
|
2021-08-02 23:20:39 +02:00
|
|
|
decoded = response.content.decode()
|
2017-04-10 12:35:56 +02:00
|
|
|
for substring in substrings:
|
|
|
|
self.assertNotIn(substring, decoded)
|
|
|
|
|
2019-05-26 22:12:46 +02:00
|
|
|
def assert_logged_in_user_id(self, user_id: Optional[int]) -> None:
|
|
|
|
"""
|
|
|
|
Verifies that the user currently logged in for the test client has the provided user_id.
|
|
|
|
Pass None to verify no user is logged in.
|
|
|
|
"""
|
|
|
|
self.assertEqual(get_session_dict_user(self.client.session), user_id)
|
|
|
|
|
2023-07-16 12:08:57 +02:00
|
|
|
def assert_message_stream_name(self, message: Message, stream_name: str) -> None:
|
|
|
|
self.assertEqual(message.recipient.type, Recipient.STREAM)
|
|
|
|
stream_id = message.recipient.type_id
|
|
|
|
stream = Stream.objects.get(id=stream_id)
|
|
|
|
self.assertEqual(stream.recipient_id, message.recipient_id)
|
|
|
|
self.assertEqual(stream.name, stream_name)
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
def webhook_fixture_data(self, type: str, action: str, file_type: str = "json") -> str:
|
2017-11-04 18:03:59 +01:00
|
|
|
fn = os.path.join(
|
|
|
|
os.path.dirname(__file__),
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
f"../webhooks/{type}/fixtures/{action}.{file_type}",
|
2017-11-04 18:03:59 +01:00
|
|
|
)
|
2020-10-24 09:33:54 +02:00
|
|
|
with open(fn) as f:
|
|
|
|
return f.read()
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
def fixture_file_name(self, file_name: str, type: str = "") -> str:
|
2019-03-26 12:46:54 +01:00
|
|
|
return os.path.join(
|
2018-04-20 03:57:21 +02:00
|
|
|
os.path.dirname(__file__),
|
2020-04-10 05:23:40 +02:00
|
|
|
f"../tests/fixtures/{type}/{file_name}",
|
2018-04-20 03:57:21 +02:00
|
|
|
)
|
2019-03-26 12:46:54 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
def fixture_data(self, file_name: str, type: str = "") -> str:
|
2019-03-26 12:46:54 +01:00
|
|
|
fn = self.fixture_file_name(file_name, type)
|
2020-10-24 09:33:54 +02:00
|
|
|
with open(fn) as f:
|
|
|
|
return f.read()
|
2018-04-20 03:57:21 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def make_stream(
|
|
|
|
self,
|
|
|
|
stream_name: str,
|
|
|
|
realm: Optional[Realm] = None,
|
|
|
|
invite_only: bool = False,
|
|
|
|
is_web_public: bool = False,
|
|
|
|
history_public_to_subscribers: Optional[bool] = None,
|
|
|
|
) -> Stream:
|
2016-11-10 19:30:09 +01:00
|
|
|
if realm is None:
|
2021-02-12 08:20:45 +01:00
|
|
|
realm = get_realm("zulip")
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2018-05-03 00:07:08 +02:00
|
|
|
history_public_to_subscribers = get_default_value_for_history_public_to_subscribers(
|
2021-02-12 08:19:30 +01:00
|
|
|
realm, invite_only, history_public_to_subscribers
|
|
|
|
)
|
2024-04-02 18:39:18 +02:00
|
|
|
administrators_user_group = NamedUserGroup.objects.get(
|
2023-09-21 13:06:39 +02:00
|
|
|
name=SystemGroups.ADMINISTRATORS, realm=realm, is_system_group=True
|
2022-07-13 20:44:28 +02:00
|
|
|
)
|
2018-05-02 17:36:26 +02:00
|
|
|
|
2016-11-10 19:30:09 +01:00
|
|
|
try:
|
|
|
|
stream = Stream.objects.create(
|
|
|
|
realm=realm,
|
|
|
|
name=stream_name,
|
|
|
|
invite_only=invite_only,
|
2020-07-23 20:34:38 +02:00
|
|
|
is_web_public=is_web_public,
|
2018-04-27 01:00:26 +02:00
|
|
|
history_public_to_subscribers=history_public_to_subscribers,
|
2022-07-13 20:44:28 +02:00
|
|
|
can_remove_subscribers_group=administrators_user_group,
|
2016-11-10 19:30:09 +01:00
|
|
|
)
|
2017-03-05 09:01:49 +01:00
|
|
|
except IntegrityError: # nocoverage -- this is for bugs in the tests
|
2021-02-12 08:19:30 +01:00
|
|
|
raise Exception(
|
2021-02-12 08:20:45 +01:00
|
|
|
f"""
|
2020-06-13 08:57:35 +02:00
|
|
|
{stream_name} already exists
|
2016-11-10 19:30:09 +01:00
|
|
|
|
|
|
|
Please call make_stream with a stream name
|
2021-02-12 08:20:45 +01:00
|
|
|
that is not already in use."""
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2019-11-28 16:56:04 +01:00
|
|
|
recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
|
|
|
|
stream.recipient = recipient
|
|
|
|
stream.save(update_fields=["recipient"])
|
2016-11-10 19:30:09 +01:00
|
|
|
return stream
|
|
|
|
|
2019-09-18 15:04:17 +02:00
|
|
|
INVALID_STREAM_ID = 999999
|
2019-10-22 07:14:46 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def get_stream_id(self, name: str, realm: Optional[Realm] = None) -> int:
|
2019-09-18 15:04:17 +02:00
|
|
|
if not realm:
|
2021-02-12 08:20:45 +01:00
|
|
|
realm = get_realm("zulip")
|
2019-09-18 15:04:17 +02:00
|
|
|
try:
|
|
|
|
stream = get_realm_stream(name, realm.id)
|
|
|
|
except Stream.DoesNotExist:
|
|
|
|
return self.INVALID_STREAM_ID
|
|
|
|
return stream.id
|
|
|
|
|
2017-08-25 06:01:29 +02:00
|
|
|
# Subscribe to a stream directly
|
2023-07-11 13:13:09 +02:00
|
|
|
def subscribe(
|
2023-10-26 18:14:01 +02:00
|
|
|
self,
|
|
|
|
user_profile: UserProfile,
|
|
|
|
stream_name: str,
|
|
|
|
invite_only: bool = False,
|
|
|
|
is_web_public: bool = False,
|
2023-07-11 13:13:09 +02:00
|
|
|
) -> Stream:
|
2020-10-13 15:16:27 +02:00
|
|
|
realm = user_profile.realm
|
2017-08-25 06:01:29 +02:00
|
|
|
try:
|
|
|
|
stream = get_stream(stream_name, user_profile.realm)
|
|
|
|
except Stream.DoesNotExist:
|
2023-07-11 13:13:09 +02:00
|
|
|
stream, from_stream_creation = create_stream_if_needed(
|
2023-10-26 18:14:01 +02:00
|
|
|
realm, stream_name, invite_only=invite_only, is_web_public=is_web_public
|
2023-07-11 13:13:09 +02:00
|
|
|
)
|
2021-04-02 18:33:28 +02:00
|
|
|
bulk_add_subscriptions(realm, [stream], [user_profile], acting_user=None)
|
2017-08-25 06:01:29 +02:00
|
|
|
return stream
|
|
|
|
|
2018-05-11 01:40:45 +02:00
|
|
|
def unsubscribe(self, user_profile: UserProfile, stream_name: str) -> None:
|
2021-12-24 14:29:40 +01:00
|
|
|
realm = user_profile.realm
|
2017-08-25 06:23:11 +02:00
|
|
|
stream = get_stream(stream_name, user_profile.realm)
|
2021-12-24 14:29:40 +01:00
|
|
|
bulk_remove_subscriptions(realm, [user_profile], [stream], acting_user=None)
|
2016-11-10 19:30:09 +01:00
|
|
|
|
|
|
|
# Subscribe to a stream by making an API request
|
2021-02-12 08:19:30 +01:00
|
|
|
def common_subscribe_to_streams(
|
|
|
|
self,
|
|
|
|
user: UserProfile,
|
|
|
|
streams: Iterable[str],
|
2022-10-06 11:56:48 +02:00
|
|
|
extra_post_data: Mapping[str, Any] = {},
|
2021-02-12 08:19:30 +01:00
|
|
|
invite_only: bool = False,
|
|
|
|
is_web_public: bool = False,
|
|
|
|
allow_fail: bool = False,
|
2022-06-14 22:44:49 +02:00
|
|
|
**extra: str,
|
2022-06-08 04:52:09 +02:00
|
|
|
) -> "TestHttpResponse":
|
2021-02-12 08:19:30 +01:00
|
|
|
post_data = {
|
2021-02-12 08:20:45 +01:00
|
|
|
"subscriptions": orjson.dumps([{"name": stream} for stream in streams]).decode(),
|
|
|
|
"is_web_public": orjson.dumps(is_web_public).decode(),
|
|
|
|
"invite_only": orjson.dumps(invite_only).decode(),
|
2021-02-12 08:19:30 +01:00
|
|
|
}
|
2016-11-10 19:30:09 +01:00
|
|
|
post_data.update(extra_post_data)
|
2022-06-14 22:44:49 +02:00
|
|
|
result = self.api_post(
|
|
|
|
user,
|
|
|
|
"/api/v1/users/me/subscriptions",
|
|
|
|
post_data,
|
|
|
|
intentionally_undocumented=False,
|
|
|
|
**extra,
|
|
|
|
)
|
2020-06-17 23:49:33 +02:00
|
|
|
if not allow_fail:
|
|
|
|
self.assert_json_success(result)
|
2016-11-10 19:30:09 +01:00
|
|
|
return result
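
# Illustrative sketch (added): subscribing through the API endpoint rather
# than the direct-database subscribe() helper, with extra POST data merged in.
# The stream names and the "announce" flag value are illustrative.
def _example_common_subscribe_to_streams(self) -> None:
    user = self.example_user("hamlet")
    self.common_subscribe_to_streams(
        user,
        ["sketch stream one", "sketch stream two"],
        extra_post_data={"announce": "false"},
    )
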
|
|
|
|
|
2021-12-10 13:55:48 +01:00
|
|
|
def subscribed_stream_name_list(self, user: UserProfile) -> str:
|
|
|
|
# This is currently only used for producing error messages.
|
|
|
|
subscribed_streams = gather_subscriptions(user)[0]
|
|
|
|
|
|
|
|
return "".join(sorted(f" * {stream['name']}\n" for stream in subscribed_streams))
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def check_user_subscribed_only_to_streams(self, user_name: str, streams: List[Stream]) -> None:
|
2017-11-16 22:12:31 +01:00
|
|
|
streams = sorted(streams, key=lambda x: x.name)
|
|
|
|
subscribed_streams = gather_subscriptions(self.nonreg_user(user_name))[0]
|
|
|
|
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(subscribed_streams, len(streams))
|
2017-11-16 22:12:31 +01:00
|
|
|
|
|
|
|
for x, y in zip(subscribed_streams, streams):
|
|
|
|
self.assertEqual(x["name"], y.name)
|
|
|
|
|
2022-10-31 22:13:26 +01:00
|
|
|
def resolve_topic_containing_message(
|
|
|
|
self,
|
|
|
|
acting_user: UserProfile,
|
|
|
|
target_message_id: int,
|
|
|
|
**extra: str,
|
|
|
|
) -> "TestHttpResponse":
|
|
|
|
"""
|
|
|
|
Mark all messages within the topic associated with message `target_message_id` as resolved.
|
|
|
|
"""
|
2024-03-22 06:45:17 +01:00
|
|
|
message = access_message(acting_user, target_message_id)
|
2022-10-31 22:13:26 +01:00
|
|
|
return self.api_patch(
|
|
|
|
acting_user,
|
|
|
|
f"/api/v1/messages/{target_message_id}",
|
|
|
|
{
|
|
|
|
"topic": RESOLVED_TOPIC_PREFIX + message.topic_name(),
|
|
|
|
"propagate_mode": "change_all",
|
|
|
|
},
|
|
|
|
**extra,
|
|
|
|
)
|
|
|
|
|
2020-08-23 19:09:27 +02:00
|
|
|
def send_webhook_payload(
|
|
|
|
self,
|
|
|
|
user_profile: UserProfile,
|
|
|
|
url: str,
|
|
|
|
payload: Union[str, Dict[str, Any]],
|
2022-06-14 22:44:49 +02:00
|
|
|
**extra: str,
|
2020-08-23 19:09:27 +02:00
|
|
|
) -> Message:
|
|
|
|
"""
|
|
|
|
Send a webhook payload to the server, and verify that the
|
|
|
|
post is successful.
|
|
|
|
|
|
|
|
This is a pretty low-level function. For most use cases
|
|
|
|
see the helpers that call this function, which do additional
|
|
|
|
checks.
|
|
|
|
|
|
|
|
Occasionally tests will call this directly, for unique
|
|
|
|
situations like having multiple messages go to a stream,
|
|
|
|
where the other helper functions are a bit too rigid,
|
|
|
|
and you'll want the test itself to do various assertions.
|
|
|
|
Even in those cases, you're often better off simply
|
|
|
|
calling client_post and assert_json_success.
|
2020-08-23 19:30:12 +02:00
|
|
|
|
|
|
|
If the caller expects a message to be sent to a stream,
|
|
|
|
the caller should make sure the user is subscribed.
|
2020-08-23 19:09:27 +02:00
|
|
|
"""
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2019-01-28 20:57:54 +01:00
|
|
|
prior_msg = self.get_last_message()
|
|
|
|
|
2022-06-14 22:44:49 +02:00
|
|
|
result = self.client_post(
|
|
|
|
url,
|
|
|
|
payload,
|
|
|
|
skip_user_agent=False,
|
|
|
|
follow=False,
|
|
|
|
secure=False,
|
2023-07-19 22:57:31 +02:00
|
|
|
headers=None,
|
2022-06-14 22:44:49 +02:00
|
|
|
intentionally_undocumented=False,
|
|
|
|
**extra,
|
|
|
|
)
|
2016-11-10 19:30:09 +01:00
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
# Check the correct message was sent
|
|
|
|
msg = self.get_last_message()
|
2019-01-28 20:57:54 +01:00
|
|
|
|
|
|
|
if msg.id == prior_msg.id:
|
2021-06-30 09:46:14 +02:00
|
|
|
raise EmptyResponseError(
|
2021-02-12 08:20:45 +01:00
|
|
|
"""
|
2019-01-28 20:57:54 +01:00
|
|
|
Your test code called an endpoint that did
|
|
|
|
not write any new messages. It is probably
|
|
|
|
broken (but still returns 200 due to exception
|
|
|
|
handling).
|
2020-08-23 19:30:12 +02:00
|
|
|
|
|
|
|
One possible gotcha is that you forgot to
|
|
|
|
subscribe the test user to the stream that
|
|
|
|
the webhook sends to.
|
2021-02-12 08:20:45 +01:00
|
|
|
"""
|
2021-02-12 08:19:30 +01:00
|
|
|
) # nocoverage
|
2019-01-28 20:57:54 +01:00
|
|
|
|
2017-08-25 06:37:47 +02:00
|
|
|
self.assertEqual(msg.sender.email, user_profile.email)
|
2016-11-10 19:30:09 +01:00
|
|
|
|
|
|
|
return msg
|
|
|
|
|
2017-11-27 05:27:04 +01:00
|
|
|
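    # A minimal usage sketch (hypothetical webhook test body; the fixture name,
    # content type, and expected topic are assumptions): post one payload and make
    # assertions on the resulting message directly, instead of going through a
    # higher-level helper.
    #
    #     msg = self.send_webhook_payload(
    #         self.test_user,
    #         self.url,
    #         self.get_body("ping"),
    #         content_type="application/json",
    #     )
    #     self.assertEqual(msg.topic_name(), "ping events")
    #     self.assertIn("ping", msg.content)
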
    def get_last_message(self) -> Message:
        return Message.objects.latest("id")

    def get_second_to_last_message(self) -> Message:
        return Message.objects.all().order_by("-id")[1]

    @contextmanager
    def simulated_markdown_failure(self) -> Iterator[None]:
        """
        This raises a failure inside of the try/except block of
        markdown.__init__.do_convert.
        """
        with mock.patch(
            "zerver.lib.markdown.unsafe_timeout", side_effect=subprocess.CalledProcessError(1, [])
        ), self.assertLogs(level="ERROR"):  # For markdown_logger.exception
            yield

    def create_default_device(
        self, user_profile: UserProfile, number: str = "+12125550100"
    ) -> None:
        phone_device = PhoneDevice(
            user=user_profile,
            name="default",
            confirmed=True,
            number=number,
            key="abcd",
            method="sms",
        )
        phone_device.save()

    def rm_tree(self, path: str) -> None:
        if os.path.exists(path):
            shutil.rmtree(path)

    def make_import_output_dir(self, exported_from: str) -> str:
        output_dir = tempfile.mkdtemp(
            dir=settings.TEST_WORKER_DIR, prefix="test-" + exported_from + "-import-"
        )
        os.makedirs(output_dir, exist_ok=True)
        return output_dir

    def get_set(self, data: List[Dict[str, Any]], field: str) -> Set[str]:
        values = {r[field] for r in data}
        return values

    def find_by_id(self, data: List[Dict[str, Any]], db_id: int) -> Dict[str, Any]:
        [r] = (r for r in data if r["id"] == db_id)
        return r

    def init_default_ldap_database(self) -> None:
        """
        Takes care of the mock_ldap setup, loading a directory from
        zerver/tests/fixtures/ldap/directory.json with various entries
        to be used by tests.

        If a test wants to specify its own directory, it can just replace
        self.mock_ldap.directory with its own content, but in most cases it should be
        enough to use change_user_attr to make simple modifications to the pre-loaded
        directory. If new user entries are needed to test for some additional unusual
        scenario, it's most likely best to add that to directory.json.
        """
        directory = orjson.loads(self.fixture_data("directory.json", type="ldap"))

        for attrs in directory.values():
            if "uid" in attrs:
                # Generate a password for the LDAP account:
                attrs["userPassword"] = [self.ldap_password(attrs["uid"][0])]

            # Load binary attributes. If an attribute in "directory" has as its
            # value a string starting with "file:", the rest of the string is
            # assumed to be a path to the file from which binary data should be
            # loaded, as the actual value of the attribute in LDAP.
            for attr, value in attrs.items():
                if isinstance(value, str) and value.startswith("file:"):
                    with open(value[5:], "rb") as f:
                        attrs[attr] = [f.read()]

        ldap_patcher = mock.patch("django_auth_ldap.config.ldap.initialize")
        self.mock_initialize = ldap_patcher.start()
        self.mock_ldap = MockLDAP(directory)
        self.mock_initialize.return_value = self.mock_ldap

    def change_ldap_user_attr(
        self, username: str, attr_name: str, attr_value: Union[str, bytes], binary: bool = False
    ) -> None:
        """
        Method for changing the value of an attribute of a user entry in the mock
        directory. Use option binary=True if you want binary data to be loaded
        into the attribute from a file specified at attr_value. This changes
        the attribute only for the specific test function that calls this method,
        and is isolated from other tests.
        """
        dn = f"uid={username},ou=users,dc=zulip,dc=com"
        if binary:
            with open(attr_value, "rb") as f:
                # attr_value should be a path to the file with the binary data
                data: Union[str, bytes] = f.read()
        else:
            data = attr_value

        self.mock_ldap.directory[dn][attr_name] = [data]

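    # A minimal usage sketch (hypothetical test body; the attribute value shown is
    # an assumption, not something in the fixture): load the default mock directory
    # and then tweak one attribute for just this test.
    #
    #     self.init_default_ldap_database()
    #     self.change_ldap_user_attr("hamlet", "cn", "King Hamlet, Prince of Denmark")
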
    def remove_ldap_user_attr(self, username: str, attr_name: str) -> None:
        """
        Method for removing the value of an attribute of a user entry in the mock
        directory. This changes the attribute only for the specific test function
        that calls this method, and is isolated from other tests.
        """
        dn = f"uid={username},ou=users,dc=zulip,dc=com"
        self.mock_ldap.directory[dn].pop(attr_name, None)

    def ldap_username(self, username: str) -> str:
        """
        Maps Zulip username to the name of the corresponding LDAP user
        in our test directory at zerver/tests/fixtures/ldap/directory.json,
        if the LDAP user exists.
        """
        return self.example_user_ldap_username_map[username]

    def ldap_password(self, uid: str) -> str:
        return f"{uid}_ldap_password"

    def email_display_from(self, email_message: EmailMessage) -> str:
        """
        Returns the email address that will show in email clients as the
        "From" field.
        """
        # The extra_headers field may contain a "From" which is used
        # for display in email clients, and appears in the RFC822
        # header as `From`. The `.from_email` accessor is the
        # "envelope from" address, used by mail transfer agents if
        # the email bounces.
        return email_message.extra_headers.get("From", email_message.from_email)

    def email_envelope_from(self, email_message: EmailMessage) -> str:
        """
        Returns the email address that will be used if the email bounces.
        """
        # See email_display_from, above.
        return email_message.from_email

    def check_has_permission_policies(
        self, policy: str, validation_func: Callable[[UserProfile], bool]
    ) -> None:
        realm = get_realm("zulip")

        owner = "desdemona"
        admin = "iago"
        moderator = "shiva"
        member = "hamlet"
        new_member = "othello"
        guest = "polonius"

        def set_age(user_name: str, age: int) -> None:
            user = self.example_user(user_name)
            user.date_joined = timezone_now() - timedelta(days=age)
            user.save()

        do_set_realm_property(realm, "waiting_period_threshold", 1000, acting_user=None)
        set_age(member, age=realm.waiting_period_threshold + 1)
        set_age(new_member, age=realm.waiting_period_threshold - 1)

        def allow(user_name: str) -> None:
            # Fetch a clean object for the user.
            user = self.example_user(user_name)
            with self.assert_database_query_count(0):
                self.assertTrue(validation_func(user))

        def prevent(user_name: str) -> None:
            # Fetch a clean object for the user.
            user = self.example_user(user_name)
            with self.assert_database_query_count(0):
                self.assertFalse(validation_func(user))

        def set_policy(level: int) -> None:
            do_set_realm_property(realm, policy, level, acting_user=None)

        set_policy(Realm.POLICY_NOBODY)
        prevent(owner)
        prevent(admin)
        prevent(moderator)
        prevent(member)
        prevent(new_member)
        prevent(guest)

        set_policy(Realm.POLICY_OWNERS_ONLY)
        allow(owner)
        prevent(admin)
        prevent(moderator)
        prevent(member)
        prevent(new_member)
        prevent(guest)

        set_policy(Realm.POLICY_ADMINS_ONLY)
        allow(owner)
        allow(admin)
        prevent(moderator)
        prevent(member)
        prevent(new_member)
        prevent(guest)

        set_policy(Realm.POLICY_MODERATORS_ONLY)
        allow(owner)
        allow(admin)
        allow(moderator)
        prevent(member)
        prevent(new_member)
        prevent(guest)

        set_policy(Realm.POLICY_FULL_MEMBERS_ONLY)
        allow(owner)
        allow(admin)
        allow(moderator)
        allow(member)
        prevent(new_member)
        prevent(guest)

        set_policy(Realm.POLICY_MEMBERS_ONLY)
        allow(owner)
        allow(admin)
        allow(moderator)
        allow(member)
        allow(new_member)
        prevent(guest)

        set_policy(Realm.POLICY_EVERYONE)
        allow(owner)
        allow(admin)
        allow(moderator)
        allow(member)
        allow(new_member)
        allow(guest)

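    # A minimal usage sketch (hypothetical test body; the policy name and the
    # UserProfile helper are assumptions about a specific realm setting): run the
    # full role/age matrix above against one permission check.
    #
    #     def test_can_subscribe_other_users(self) -> None:
    #         self.check_has_permission_policies(
    #             "invite_to_stream_policy", UserProfile.can_subscribe_other_users
    #         )
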
    def subscribe_realm_to_manual_license_management_plan(
        self, realm: Realm, licenses: int, licenses_at_next_renewal: int, billing_schedule: int
    ) -> Tuple[CustomerPlan, LicenseLedger]:
        customer, _ = Customer.objects.get_or_create(realm=realm)
        plan = CustomerPlan.objects.create(
            customer=customer,
            automanage_licenses=False,
            billing_cycle_anchor=timezone_now(),
            billing_schedule=billing_schedule,
            tier=CustomerPlan.TIER_CLOUD_STANDARD,
        )
        ledger = LicenseLedger.objects.create(
            plan=plan,
            is_renewal=True,
            event_time=timezone_now(),
            licenses=licenses,
            licenses_at_next_renewal=licenses_at_next_renewal,
        )
        realm.plan_type = Realm.PLAN_TYPE_STANDARD
        realm.save(update_fields=["plan_type"])
        return plan, ledger

    def subscribe_realm_to_monthly_plan_on_manual_license_management(
        self, realm: Realm, licenses: int, licenses_at_next_renewal: int
    ) -> Tuple[CustomerPlan, LicenseLedger]:
        return self.subscribe_realm_to_manual_license_management_plan(
            realm, licenses, licenses_at_next_renewal, CustomerPlan.BILLING_SCHEDULE_MONTHLY
        )

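    # A minimal usage sketch (hypothetical test body; the license counts are
    # arbitrary): put the "zulip" realm on a manually managed monthly plan so a
    # test can exercise billing-dependent code paths.
    #
    #     realm = get_realm("zulip")
    #     plan, ledger = self.subscribe_realm_to_monthly_plan_on_manual_license_management(
    #         realm, 25, 25
    #     )
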
    def create_user_notifications_data_object(
        self, *, user_id: int, **kwargs: Any
    ) -> UserMessageNotificationsData:
        return UserMessageNotificationsData(
            user_id=user_id,
            online_push_enabled=kwargs.get("online_push_enabled", False),
            dm_email_notify=kwargs.get("dm_email_notify", False),
            dm_push_notify=kwargs.get("dm_push_notify", False),
            mention_email_notify=kwargs.get("mention_email_notify", False),
            mention_push_notify=kwargs.get("mention_push_notify", False),
            topic_wildcard_mention_email_notify=kwargs.get(
                "topic_wildcard_mention_email_notify", False
            ),
            topic_wildcard_mention_push_notify=kwargs.get(
                "topic_wildcard_mention_push_notify", False
            ),
            stream_wildcard_mention_email_notify=kwargs.get(
                "stream_wildcard_mention_email_notify", False
            ),
            stream_wildcard_mention_push_notify=kwargs.get(
                "stream_wildcard_mention_push_notify", False
            ),
            stream_email_notify=kwargs.get("stream_email_notify", False),
            stream_push_notify=kwargs.get("stream_push_notify", False),
            followed_topic_email_notify=kwargs.get("followed_topic_email_notify", False),
            followed_topic_push_notify=kwargs.get("followed_topic_push_notify", False),
            topic_wildcard_mention_in_followed_topic_email_notify=kwargs.get(
                "topic_wildcard_mention_in_followed_topic_email_notify", False
            ),
            topic_wildcard_mention_in_followed_topic_push_notify=kwargs.get(
                "topic_wildcard_mention_in_followed_topic_push_notify", False
            ),
            stream_wildcard_mention_in_followed_topic_email_notify=kwargs.get(
                "stream_wildcard_mention_in_followed_topic_email_notify", False
            ),
            stream_wildcard_mention_in_followed_topic_push_notify=kwargs.get(
                "stream_wildcard_mention_in_followed_topic_push_notify", False
            ),
            sender_is_muted=kwargs.get("sender_is_muted", False),
            disable_external_notifications=kwargs.get("disable_external_notifications", False),
        )

    def get_maybe_enqueue_notifications_parameters(
        self, *, message_id: int, user_id: int, acting_user_id: int, **kwargs: Any
    ) -> Dict[str, Any]:
        """
        Returns a dictionary with the passed parameters, after filling up the
        missing data with default values, for testing what was passed to the
        `maybe_enqueue_notifications` method.
        """
        user_notifications_data = self.create_user_notifications_data_object(
            user_id=user_id, **kwargs
        )
        return dict(
            user_notifications_data=user_notifications_data,
            message_id=message_id,
            acting_user_id=acting_user_id,
            mentioned_user_group_id=kwargs.get("mentioned_user_group_id", None),
            idle=kwargs.get("idle", True),
            already_notified=kwargs.get(
                "already_notified", {"email_notified": False, "push_notified": False}
            ),
        )

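    # A minimal usage sketch (hypothetical test body; the ids and user variables
    # are placeholders): build the expected kwargs for a maybe_enqueue_notifications
    # call where the user was mentioned, then compare against what the code under
    # test actually passed.
    #
    #     expected = self.get_maybe_enqueue_notifications_parameters(
    #         message_id=message_id,
    #         user_id=hamlet.id,
    #         acting_user_id=othello.id,
    #         mention_push_notify=True,
    #         mention_email_notify=True,
    #     )
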
    def verify_emoji_code_foreign_keys(self) -> None:
        """
        DB tables that refer to RealmEmoji use int(emoji_code) as the
        foreign key. Those tables tend to de-normalize emoji_name due
        to our inheritance-based setup. This helper makes sure those
        invariants are intact, which is particularly tricky during
        the import/export process (or during conversions from things
        like Slack/RocketChat/MatterMost/etc.).
        """
        dct = {}

        for realm_emoji in RealmEmoji.objects.all():
            dct[realm_emoji.id] = realm_emoji

        if not dct:
            raise AssertionError("test needs RealmEmoji rows")

        count = 0
        for reaction in Reaction.objects.filter(reaction_type=Reaction.REALM_EMOJI):
            realm_emoji_id = int(reaction.emoji_code)
            assert realm_emoji_id in dct
            self.assertEqual(dct[realm_emoji_id].name, reaction.emoji_name)
            self.assertEqual(dct[realm_emoji_id].realm_id, reaction.user_profile.realm_id)
            count += 1

        for user_status in UserStatus.objects.filter(reaction_type=UserStatus.REALM_EMOJI):
            realm_emoji_id = int(user_status.emoji_code)
            assert realm_emoji_id in dct
            self.assertEqual(dct[realm_emoji_id].name, user_status.emoji_name)
            self.assertEqual(dct[realm_emoji_id].realm_id, user_status.user_profile.realm_id)
            count += 1

        if count == 0:
            raise AssertionError("test is meaningless without any pertinent rows")

    def check_user_added_in_system_group(self, user: UserProfile) -> None:
        user_group = get_system_user_group_for_user(user)
        self.assertTrue(
            UserGroupMembership.objects.filter(user_profile=user, user_group=user_group).exists()
        )

    def _assert_long_term_idle(self, user: UserProfile) -> None:
        if not user.long_term_idle:
            raise AssertionError(
                """
                We expect you to explicitly call self.soft_deactivate_user
                if your user is not already soft-deactivated.
                """
            )

    def expect_soft_reactivation(self, user: UserProfile, action: Callable[[], None]) -> None:
        self._assert_long_term_idle(user)
        action()
        # Avoid using the stale user object.
        user.refresh_from_db()
        self.assertEqual(user.long_term_idle, False)

    def expect_to_stay_long_term_idle(self, user: UserProfile, action: Callable[[], None]) -> None:
        self._assert_long_term_idle(user)
        action()
        # Avoid using the stale user object.
        user.refresh_from_db()
        self.assertEqual(user.long_term_idle, True)

    def soft_deactivate_user(self, user: UserProfile) -> None:
        do_soft_deactivate_users([user])
        assert user.long_term_idle

    def set_up_db_for_testing_user_access(self) -> None:
        polonius = self.example_user("polonius")
        hamlet = self.example_user("hamlet")
        othello = self.example_user("othello")
        iago = self.example_user("iago")
        prospero = self.example_user("prospero")
        aaron = self.example_user("aaron")
        zoe = self.example_user("ZOE")
        shiva = self.example_user("shiva")
        realm = get_realm("zulip")
        # Polonius is subscribed to "Verona" by default, so we unsubscribe
        # him so that it becomes easier to test the restricted access.
        self.unsubscribe(polonius, "Verona")

        self.make_stream("test_stream1")
        self.make_stream("test_stream2", invite_only=True)

        self.subscribe(othello, "test_stream1")
        self.send_stream_message(othello, "test_stream1", content="test message", topic_name="test")
        self.unsubscribe(othello, "test_stream1")

        self.subscribe(polonius, "test_stream1")
        self.subscribe(polonius, "test_stream2")
        self.subscribe(hamlet, "test_stream1")
        self.subscribe(iago, "test_stream2")

        self.send_personal_message(polonius, prospero)
        self.send_personal_message(shiva, polonius)
        self.send_huddle_message(aaron, [polonius, zoe])

        members_group = NamedUserGroup.objects.get(name="role:members", realm=realm)
        do_change_realm_permission_group_setting(
            realm, "can_access_all_users_group", members_group, acting_user=None
        )


class ZulipTestCase(ZulipTestCaseMixin, TestCase):
    @contextmanager
    def capture_send_event_calls(
        self, expected_num_events: int
    ) -> Iterator[List[Mapping[str, Any]]]:
        lst: List[Mapping[str, Any]] = []

        # process_notification takes a single parameter called 'notice'.
        # lst.append takes a single argument called 'object'.
        # Some code might call process_notification using keyword arguments,
        # so mypy doesn't allow assigning lst.append to process_notification.
        # So explicitly change the parameter name to 'notice' to work around this problem.
        with mock.patch("zerver.tornado.event_queue.process_notification", lst.append):
            # Some `send_event` calls need to be executed only after the current transaction
            # commits (using `on_commit` hooks). Because the transaction in Django tests never
            # commits (rather, gets rolled back after the test completes), such events would
            # never be sent in tests, and we would be unable to verify them. Hence, we use
            # this helper to make sure the `send_event` calls actually run.
            with self.captureOnCommitCallbacks(execute=True):
                yield lst

        self.assert_length(lst, expected_num_events)

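    # A minimal usage sketch (hypothetical test body; the action called is a
    # placeholder, not a real function in this codebase): assert that exactly one
    # event is sent while the code under test runs, then inspect it.
    #
    #     with self.capture_send_event_calls(expected_num_events=1) as events:
    #         do_some_action_that_sends_one_event()
    #     self.assertEqual(events[0]["event"]["type"], "realm")
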
    @override
    def send_personal_message(
        self,
        from_user: UserProfile,
        to_user: UserProfile,
        content: str = "test content",
        *,
        read_by_sender: bool = True,
        skip_capture_on_commit_callbacks: bool = False,
    ) -> int:
        """This function is a wrapper around 'send_personal_message',
        defined in 'ZulipTestCaseMixin', with an extra parameter
        'skip_capture_on_commit_callbacks'.

        It should be set to 'True' when making a call with either
        'verify_action' or 'capture_send_event_calls' as context manager,
        because they already have 'self.captureOnCommitCallbacks'
        (see the comment in 'capture_send_event_calls').

        For all other cases, we should call 'send_personal_message' with
        'self.captureOnCommitCallbacks' for 'send_event_on_commit' and/or
        'queue_event_on_commit' to work.
        """
        if skip_capture_on_commit_callbacks:
            message_id = super().send_personal_message(
                from_user,
                to_user,
                content,
                read_by_sender=read_by_sender,
            )
        else:
            with self.captureOnCommitCallbacks(execute=True):
                message_id = super().send_personal_message(
                    from_user,
                    to_user,
                    content,
                    read_by_sender=read_by_sender,
                )
        return message_id

    @override
    def send_huddle_message(
        self,
        from_user: UserProfile,
        to_users: List[UserProfile],
        content: str = "test content",
        *,
        read_by_sender: bool = True,
        skip_capture_on_commit_callbacks: bool = False,
    ) -> int:
        """This function is a wrapper around 'send_huddle_message',
        defined in 'ZulipTestCaseMixin', with an extra parameter
        'skip_capture_on_commit_callbacks'.

        It should be set to 'True' when making a call with either
        'verify_action' or 'capture_send_event_calls' as context manager,
        because they already have 'self.captureOnCommitCallbacks'
        (see the comment in 'capture_send_event_calls').

        For all other cases, we should call 'send_huddle_message' with
        'self.captureOnCommitCallbacks' for 'send_event_on_commit' and/or
        'queue_event_on_commit' to work.
        """
        if skip_capture_on_commit_callbacks:
            message_id = super().send_huddle_message(
                from_user,
                to_users,
                content,
                read_by_sender=read_by_sender,
            )
        else:
            with self.captureOnCommitCallbacks(execute=True):
                message_id = super().send_huddle_message(
                    from_user,
                    to_users,
                    content,
                    read_by_sender=read_by_sender,
                )
        return message_id

    @override
    def send_stream_message(
        self,
        sender: UserProfile,
        stream_name: str,
        content: str = "test content",
        topic_name: str = "test",
        recipient_realm: Optional[Realm] = None,
        *,
        allow_unsubscribed_sender: bool = False,
        read_by_sender: bool = True,
        skip_capture_on_commit_callbacks: bool = False,
    ) -> int:
        """This function is a wrapper around 'send_stream_message',
        defined in 'ZulipTestCaseMixin', with an extra parameter
        'skip_capture_on_commit_callbacks'.

        It should be set to 'True' when making a call with either
        'verify_action' or 'capture_send_event_calls' as context manager,
        because they already have 'self.captureOnCommitCallbacks'
        (see the comment in 'capture_send_event_calls').

        For all other cases, we should call 'send_stream_message' with
        'self.captureOnCommitCallbacks' for 'send_event_on_commit' and/or
        'queue_event_on_commit' to work.
        """
        if skip_capture_on_commit_callbacks:
            message_id = super().send_stream_message(
                sender,
                stream_name,
                content,
                topic_name,
                recipient_realm,
                allow_unsubscribed_sender=allow_unsubscribed_sender,
                read_by_sender=read_by_sender,
            )
        else:
            with self.captureOnCommitCallbacks(execute=True):
                message_id = super().send_stream_message(
                    sender,
                    stream_name,
                    content,
                    topic_name,
                    recipient_realm,
                    allow_unsubscribed_sender=allow_unsubscribed_sender,
                    read_by_sender=read_by_sender,
                )
        return message_id


def get_row_ids_in_all_tables() -> Iterator[Tuple[str, Set[int]]]:
    all_models = apps.get_models(include_auto_created=True)
    ignored_tables = {"django_session"}

    for model in all_models:
        table_name = model._meta.db_table
        if table_name in ignored_tables:
            continue
        ids = model._default_manager.all().values_list("id", flat=True)
        yield table_name, set(ids)


class ZulipTransactionTestCase(ZulipTestCaseMixin, TransactionTestCase):
    """The default Django TestCase wraps each test in a transaction. This
    is invaluable for being able to roll back the transaction and thus
    efficiently do many tests containing database changes, but it
    prevents testing certain transaction-related races and locking
    bugs.

    This test class is intended to be used (sparingly!) for tests that
    need to verify transaction-related behavior, like locking with
    select_for_update or transaction.atomic(durable=True).

    Unlike ZulipTestCase, ZulipTransactionTestCase has the following traits:
        1. Does not offer isolation between tests by wrapping them inside an atomic transaction.
        2. Changes are committed to the current worker's test database, so side effects carry on.

    All ZulipTransactionTestCase tests must be carefully written to
    avoid side effects on the database; while Django runs
    TransactionTestCase after all normal TestCase tests on a given
    test worker to avoid pollution, they can break other
    ZulipTransactionTestCase tests if they leak state.
    """

    @override
    def setUp(self) -> None:
        super().setUp()
        self.models_ids_set = dict(get_row_ids_in_all_tables())

    @override
    def tearDown(self) -> None:
        """Verifies that the test did not adjust the set of rows in the test
        database. This is a sanity check to help ensure that tests
        using this class do not have unintended side effects on the
        test database.
        """
        super().tearDown()
        for table_name, ids in get_row_ids_in_all_tables():
            self.assertSetEqual(
                self.models_ids_set[table_name],
                ids,
                f"{table_name} got a different set of ids after this test",
            )

    def _fixture_teardown(self) -> None:
        """We override the default _fixture_teardown method defined on
        TransactionTestCase, so that the test database does not get
        flushed/deleted after each test using this class.
        """


class WebhookTestCase(ZulipTestCase):
|
2021-06-26 10:07:54 +02:00
|
|
|
"""Shared test class for all incoming webhooks tests.
|
|
|
|
|
|
|
|
Used by configuring the below class attributes, and calling
|
|
|
|
send_and_test_message in individual tests.
|
|
|
|
|
|
|
|
* Tests can override build_webhook_url if the webhook requires a
|
|
|
|
different URL format.
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2021-06-26 10:07:54 +02:00
|
|
|
* Tests can override get_body for cases where there is no
|
|
|
|
available fixture file.
|
|
|
|
|
|
|
|
* Tests should specify WEBHOOK_DIR_NAME to enforce that all event
|
|
|
|
types are declared in the @webhook_view decorator. This is
|
|
|
|
important for ensuring we document all fully supported event types.
|
2016-11-10 19:30:09 +01:00
|
|
|
"""
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-05-04 22:02:50 +02:00
|
|
|
CHANNEL_NAME: Optional[str] = None
|
2021-02-12 08:20:45 +01:00
|
|
|
TEST_USER_EMAIL = "webhook-bot@zulip.com"
|
2020-07-05 02:14:06 +02:00
|
|
|
URL_TEMPLATE: str
|
2021-06-26 09:18:33 +02:00
|
|
|
WEBHOOK_DIR_NAME: Optional[str] = None
|
2021-06-26 10:07:54 +02:00
|
|
|
# This last parameter is a workaround to handle webhooks that do not
|
|
|
|
# name the main function api_{WEBHOOK_DIR_NAME}_webhook.
|
|
|
|
VIEW_FUNCTION_NAME: Optional[str] = None
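# A typical subclass configuration, as a sketch (hypothetical "example"
# integration; fixture name, topic, and message content are made up):
#
#     class ExampleHookTests(WebhookTestCase):
#         CHANNEL_NAME = "example"
#         URL_TEMPLATE = "/api/v1/external/example?api_key={api_key}&stream={stream}"
#         WEBHOOK_DIR_NAME = "example"
#
#         def test_push_event(self) -> None:
#             self.check_webhook("push", "expected topic", "expected message")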
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2017-08-25 06:37:47 +02:00
|
|
|
@property
|
2017-11-27 05:27:04 +01:00
|
|
|
def test_user(self) -> UserProfile:
|
2023-08-19 23:26:20 +02:00
|
|
|
return self.get_user_from_email(self.TEST_USER_EMAIL, get_realm("zulip"))
|
2017-08-25 06:37:47 +02:00
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
@override
|
2017-11-27 05:27:04 +01:00
|
|
|
def setUp(self) -> None:
|
2019-10-19 20:47:00 +02:00
|
|
|
super().setUp()
|
2016-11-10 19:30:09 +01:00
|
|
|
self.url = self.build_webhook_url()
|
|
|
|
|
2021-07-17 18:05:15 +02:00
|
|
|
if self.WEBHOOK_DIR_NAME is not None:
|
2021-06-26 10:07:54 +02:00
|
|
|
# If VIEW_FUNCTION_NAME is explicitly specified and
|
|
|
|
# WEBHOOK_DIR_NAME is not None, an exception will be
|
|
|
|
# raised when a test triggers events that are not
|
|
|
|
# explicitly specified via the event_types parameter to
|
|
|
|
# the @webhook_view decorator.
|
2021-07-17 18:05:15 +02:00
|
|
|
if self.VIEW_FUNCTION_NAME is None:
|
|
|
|
function = import_string(
|
|
|
|
f"zerver.webhooks.{self.WEBHOOK_DIR_NAME}.view.api_{self.WEBHOOK_DIR_NAME}_webhook"
|
|
|
|
)
|
|
|
|
else:
|
|
|
|
function = import_string(
|
|
|
|
f"zerver.webhooks.{self.WEBHOOK_DIR_NAME}.view.{self.VIEW_FUNCTION_NAME}"
|
|
|
|
)
|
2021-06-26 10:07:54 +02:00
|
|
|
all_event_types = None
|
|
|
|
|
|
|
|
if hasattr(function, "_all_event_types"):
|
|
|
|
all_event_types = function._all_event_types
|
|
|
|
|
|
|
|
if all_event_types is None:
|
|
|
|
return # nocoverage
|
|
|
|
|
|
|
|
def side_effect(*args: Any, **kwargs: Any) -> None:
|
|
|
|
complete_event_type = (
|
|
|
|
kwargs.get("complete_event_type")
|
|
|
|
if len(args) < 5
|
|
|
|
else args[4] # complete_event_type is the argument at index 4
|
|
|
|
)
|
|
|
|
if (
|
|
|
|
complete_event_type is not None
|
|
|
|
and all_event_types is not None
|
|
|
|
and complete_event_type not in all_event_types
|
2022-06-24 00:14:37 +02:00
|
|
|
): # nocoverage
|
2021-06-26 10:07:54 +02:00
|
|
|
raise Exception(
|
|
|
|
f"""
|
|
|
|
Error: This test triggered a message using the event "{complete_event_type}", which was not properly
|
|
|
|
registered via the @webhook_view(..., event_types=[...]). These registrations are important for Zulip
|
|
|
|
self-documenting the supported event types for this integration.
|
|
|
|
|
|
|
|
You can fix this by adding "{complete_event_type}" to ALL_EVENT_TYPES for this webhook.
|
|
|
|
""".strip()
|
|
|
|
)
|
|
|
|
check_send_webhook_message(*args, **kwargs)
|
|
|
|
|
|
|
|
self.patch = mock.patch(
|
|
|
|
f"zerver.webhooks.{self.WEBHOOK_DIR_NAME}.view.check_send_webhook_message",
|
|
|
|
side_effect=side_effect,
|
|
|
|
)
|
|
|
|
self.patch.start()
|
|
|
|
self.addCleanup(self.patch.stop)
|
|
|
|
|
2024-05-04 22:04:48 +02:00
|
|
|
def api_channel_message(
|
2021-12-17 08:14:22 +01:00
|
|
|
self,
|
|
|
|
user: UserProfile,
|
|
|
|
fixture_name: str,
|
|
|
|
expected_topic: Optional[str] = None,
|
|
|
|
expected_message: Optional[str] = None,
|
|
|
|
content_type: Optional[str] = "application/json",
|
|
|
|
expect_noop: bool = False,
|
2022-06-14 22:44:49 +02:00
|
|
|
**extra: str,
|
2022-06-13 21:46:53 +02:00
|
|
|
) -> None:
|
2022-06-14 22:44:49 +02:00
|
|
|
extra["HTTP_AUTHORIZATION"] = self.encode_user(user)
|
2022-06-13 21:46:53 +02:00
|
|
|
self.check_webhook(
|
2022-06-14 22:44:49 +02:00
|
|
|
fixture_name,
|
|
|
|
expected_topic,
|
|
|
|
expected_message,
|
|
|
|
content_type,
|
|
|
|
expect_noop,
|
|
|
|
**extra,
|
2021-12-17 08:14:22 +01:00
|
|
|
)
|
2017-12-14 19:02:02 +01:00
|
|
|
|
2020-08-23 15:49:24 +02:00
|
|
|
def check_webhook(
|
|
|
|
self,
|
|
|
|
fixture_name: str,
|
2024-01-14 14:38:50 +01:00
|
|
|
expected_topic_name: Optional[str] = None,
|
2021-06-30 09:46:14 +02:00
|
|
|
expected_message: Optional[str] = None,
|
2021-02-12 08:19:30 +01:00
|
|
|
content_type: Optional[str] = "application/json",
|
2021-12-17 08:14:22 +01:00
|
|
|
expect_noop: bool = False,
|
2022-06-14 22:44:49 +02:00
|
|
|
**extra: str,
|
2020-08-24 17:47:30 +02:00
|
|
|
) -> None:
|
2020-08-23 15:49:24 +02:00
|
|
|
"""
|
|
|
|
check_webhook is the main way to test "normal" webhooks that
|
|
|
|
work by receiving a payload from a third party and then writing
|
|
|
|
some message to a Zulip stream.
|
|
|
|
|
|
|
|
We use `fixture_name` to find the payload data in one of our test
|
|
|
|
fixtures. Then we verify that a message gets sent to a stream:
|
|
|
|
|
2024-05-04 22:02:50 +02:00
|
|
|
self.CHANNEL_NAME: stream name
|
2024-01-14 14:38:50 +01:00
|
|
|
expected_topic_name: topic name
|
2020-08-23 15:49:24 +02:00
|
|
|
expected_message: content
|
|
|
|
|
|
|
|
We simulate the delivery of the payload with `content_type`,
|
2022-06-14 22:44:49 +02:00
|
|
|
and you can pass other headers via `extra`.
|
2020-08-23 15:49:24 +02:00
|
|
|
|
2023-06-19 16:42:11 +02:00
|
|
|
For the rare cases of webhooks actually sending direct messages,
|
2020-08-23 15:49:24 +02:00
|
|
|
see send_and_test_private_message.
|
2021-06-30 09:46:14 +02:00
|
|
|
|
|
|
|
When no message is expected to be sent, set `expect_noop` to True.
|
2020-08-23 15:49:24 +02:00
|
|
|
"""
|
2024-05-04 22:02:50 +02:00
|
|
|
assert self.CHANNEL_NAME is not None
|
|
|
|
self.subscribe(self.test_user, self.CHANNEL_NAME)
|
2020-08-23 19:30:12 +02:00
|
|
|
|
2020-08-20 17:03:43 +02:00
|
|
|
payload = self.get_payload(fixture_name)
|
2016-11-10 19:30:09 +01:00
|
|
|
if content_type is not None:
|
2022-06-14 22:44:49 +02:00
|
|
|
extra["content_type"] = content_type
|
2021-06-26 09:18:33 +02:00
|
|
|
if self.WEBHOOK_DIR_NAME is not None:
|
|
|
|
headers = get_fixture_http_headers(self.WEBHOOK_DIR_NAME, fixture_name)
|
2020-07-05 02:14:06 +02:00
|
|
|
headers = standardize_headers(headers)
|
2022-06-14 22:44:49 +02:00
|
|
|
extra.update(headers)
|
2021-06-30 09:46:14 +02:00
|
|
|
try:
|
|
|
|
msg = self.send_webhook_payload(
|
|
|
|
self.test_user,
|
|
|
|
self.url,
|
|
|
|
payload,
|
2022-06-14 22:44:49 +02:00
|
|
|
**extra,
|
2021-06-30 09:46:14 +02:00
|
|
|
)
|
|
|
|
except EmptyResponseError:
|
|
|
|
if expect_noop:
|
|
|
|
return
|
|
|
|
else:
|
|
|
|
raise AssertionError(
|
|
|
|
"No message was sent. Pass expect_noop=True if this is intentional."
|
|
|
|
)
|
2020-08-23 19:30:12 +02:00
|
|
|
|
2021-06-30 09:46:14 +02:00
|
|
|
if expect_noop:
|
|
|
|
raise Exception(
|
|
|
|
"""
|
|
|
|
While no message is expected given expect_noop=True,
|
|
|
|
your test code triggered an endpoint that did write
|
|
|
|
one or more new messages.
|
|
|
|
""".strip()
|
|
|
|
)
|
2024-01-14 14:38:50 +01:00
|
|
|
assert expected_message is not None and expected_topic_name is not None
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2024-05-04 22:09:56 +02:00
|
|
|
self.assert_channel_message(
|
2020-08-24 17:47:30 +02:00
|
|
|
message=msg,
|
2024-05-04 22:24:07 +02:00
|
|
|
channel_name=self.CHANNEL_NAME,
|
2024-01-14 14:38:50 +01:00
|
|
|
topic_name=expected_topic_name,
|
2020-08-24 17:47:30 +02:00
|
|
|
content=expected_message,
|
|
|
|
)
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2024-05-04 22:09:56 +02:00
|
|
|
def assert_channel_message(
|
2020-08-24 14:21:58 +02:00
|
|
|
self,
|
|
|
|
message: Message,
|
2024-05-04 22:24:07 +02:00
|
|
|
channel_name: str,
|
2020-08-24 14:21:58 +02:00
|
|
|
topic_name: str,
|
|
|
|
content: str,
|
|
|
|
) -> None:
|
2024-05-04 22:24:07 +02:00
|
|
|
self.assert_message_stream_name(message, channel_name)
|
2020-08-24 14:21:58 +02:00
|
|
|
self.assertEqual(message.topic_name(), topic_name)
|
|
|
|
self.assertEqual(message.content, content)
|
|
|
|
|
2020-07-05 02:14:06 +02:00
|
|
|
def send_and_test_private_message(
|
|
|
|
self,
|
|
|
|
fixture_name: str,
|
2020-08-23 16:45:07 +02:00
|
|
|
expected_message: str,
|
|
|
|
content_type: str = "application/json",
|
2021-12-17 08:14:22 +01:00
|
|
|
*,
|
|
|
|
sender: Optional[UserProfile] = None,
|
2022-06-14 22:44:49 +02:00
|
|
|
**extra: str,
|
2020-07-05 02:14:06 +02:00
|
|
|
) -> Message:
|
2020-08-23 15:49:24 +02:00
|
|
|
"""
|
|
|
|
For the rare cases that you are testing a webhook that sends
|
2023-06-19 16:42:11 +02:00
|
|
|
direct messages, use this function.
|
2020-08-23 15:49:24 +02:00
|
|
|
|
|
|
|
Most webhooks send to streams, and you will want to look at
|
|
|
|
check_webhook.
|
|
|
|
"""
|
2020-08-20 17:03:43 +02:00
|
|
|
payload = self.get_payload(fixture_name)
|
2022-06-14 22:44:49 +02:00
|
|
|
extra["content_type"] = content_type
|
2020-08-23 16:45:07 +02:00
|
|
|
|
2021-06-26 09:18:33 +02:00
|
|
|
if self.WEBHOOK_DIR_NAME is not None:
|
|
|
|
headers = get_fixture_http_headers(self.WEBHOOK_DIR_NAME, fixture_name)
|
2020-07-05 02:14:06 +02:00
|
|
|
headers = standardize_headers(headers)
|
2022-06-14 22:44:49 +02:00
|
|
|
extra.update(headers)
|
2021-12-17 08:14:22 +01:00
|
|
|
|
|
|
|
if sender is None:
|
|
|
|
sender = self.test_user
|
2020-08-23 19:30:12 +02:00
|
|
|
|
2020-08-23 19:09:27 +02:00
|
|
|
msg = self.send_webhook_payload(
|
|
|
|
sender,
|
|
|
|
self.url,
|
|
|
|
payload,
|
2022-06-14 22:44:49 +02:00
|
|
|
**extra,
|
2020-08-23 19:09:27 +02:00
|
|
|
)
|
2020-08-24 17:32:54 +02:00
|
|
|
self.assertEqual(msg.content, expected_message)
|
2016-11-10 19:30:09 +01:00
|
|
|
|
|
|
|
return msg
|
|
|
|
|
2021-12-17 08:14:22 +01:00
|
|
|
def build_webhook_url(self, *args: str, **kwargs: str) -> str:
|
2017-04-21 23:35:40 +02:00
|
|
|
url = self.URL_TEMPLATE
|
|
|
|
if url.find("api_key") >= 0:
|
2018-08-01 10:53:40 +02:00
|
|
|
api_key = get_api_key(self.test_user)
|
2024-05-04 22:02:50 +02:00
|
|
|
url = self.URL_TEMPLATE.format(api_key=api_key, stream=self.CHANNEL_NAME)
|
2017-04-21 23:35:40 +02:00
|
|
|
else:
|
2024-05-04 22:02:50 +02:00
|
|
|
url = self.URL_TEMPLATE.format(stream=self.CHANNEL_NAME)
|
2017-04-21 23:35:40 +02:00
|
|
|
|
|
|
|
has_arguments = kwargs or args
|
2021-02-12 08:20:45 +01:00
|
|
|
if has_arguments and url.find("?") == -1:
|
2020-06-09 00:25:09 +02:00
|
|
|
url = f"{url}?" # nocoverage
|
2017-04-06 23:26:29 +02:00
|
|
|
else:
|
2020-06-09 00:25:09 +02:00
|
|
|
url = f"{url}&"
|
2017-04-06 23:26:29 +02:00
|
|
|
|
|
|
|
for key, value in kwargs.items():
|
2020-06-09 00:25:09 +02:00
|
|
|
url = f"{url}{key}={value}&"
|
2017-04-06 23:26:29 +02:00
|
|
|
|
2017-04-21 23:35:40 +02:00
|
|
|
for arg in args:
|
2020-06-09 00:25:09 +02:00
|
|
|
url = f"{url}{arg}&"
|
2017-04-21 23:35:40 +02:00
|
|
|
|
|
|
|
return url[:-1] if has_arguments else url
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2020-08-20 17:03:43 +02:00
|
|
|
def get_payload(self, fixture_name: str) -> Union[str, Dict[str, str]]:
|
|
|
|
"""
|
|
|
|
Generally webhooks that override this should return dicts."""
|
|
|
|
return self.get_body(fixture_name)
|
|
|
|
|
|
|
|
def get_body(self, fixture_name: str) -> str:
|
2021-06-26 09:18:33 +02:00
|
|
|
assert self.WEBHOOK_DIR_NAME is not None
|
|
|
|
body = self.webhook_fixture_data(self.WEBHOOK_DIR_NAME, fixture_name)
|
2020-08-20 17:40:09 +02:00
|
|
|
# fail fast if we don't have valid json
|
|
|
|
orjson.loads(body)
|
|
|
|
return body
|
2016-11-10 19:30:09 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-10-19 10:17:09 +02:00
|
|
|
class MigrationsTestCase(ZulipTransactionTestCase): # nocoverage
|
2018-05-21 18:56:45 +02:00
|
|
|
"""
|
|
|
|
Test class for database migrations inspired by this blog post:
|
|
|
|
https://www.caktusgroup.com/blog/2016/02/02/writing-unit-tests-django-migrations/
|
|
|
|
Documented at https://zulip.readthedocs.io/en/latest/subsystems/schema-migrations.html
|
|
|
|
"""
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-04-09 18:19:55 +02:00
|
|
|
@property
|
|
|
|
def app(self) -> str:
|
2021-07-24 16:56:39 +02:00
|
|
|
app_config = apps.get_containing_app_config(type(self).__module__)
|
|
|
|
assert app_config is not None
|
|
|
|
return app_config.name
|
2018-04-09 18:19:55 +02:00
|
|
|
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
migrate_from: Optional[str] = None
|
|
|
|
migrate_to: Optional[str] = None
|
2018-04-09 18:19:55 +02:00
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
@override
|
2018-04-09 18:19:55 +02:00
|
|
|
def setUp(self) -> None:
|
2023-10-19 10:17:09 +02:00
|
|
|
super().setUp()
|
2021-02-12 08:19:30 +01:00
|
|
|
assert (
|
|
|
|
self.migrate_from and self.migrate_to
|
|
|
|
), f"TestCase '{type(self).__name__}' must define migrate_from and migrate_to properties"
|
2020-04-22 01:09:50 +02:00
|
|
|
migrate_from: List[Tuple[str, str]] = [(self.app, self.migrate_from)]
|
|
|
|
migrate_to: List[Tuple[str, str]] = [(self.app, self.migrate_to)]
|
2018-04-09 18:19:55 +02:00
|
|
|
executor = MigrationExecutor(connection)
|
|
|
|
old_apps = executor.loader.project_state(migrate_from).apps
|
|
|
|
|
|
|
|
# Reverse to the original migration
|
|
|
|
executor.migrate(migrate_from)
|
|
|
|
|
|
|
|
self.setUpBeforeMigration(old_apps)
|
|
|
|
|
|
|
|
# Run the migration to test
|
|
|
|
executor = MigrationExecutor(connection)
|
|
|
|
executor.loader.build_graph() # reload.
|
|
|
|
executor.migrate(migrate_to)
|
|
|
|
|
|
|
|
self.apps = executor.loader.project_state(migrate_to).apps
|
|
|
|
|
|
|
|
def setUpBeforeMigration(self, apps: StateApps) -> None:
|
|
|
|
pass # nocoverage
|
2022-04-14 23:58:37 +02:00
|
|
|
|
|
|
|
|
|
|
|
def get_topic_messages(user_profile: UserProfile, stream: Stream, topic_name: str) -> List[Message]:
|
|
|
|
query = UserMessage.objects.filter(
|
|
|
|
user_profile=user_profile,
|
|
|
|
message__recipient=stream.recipient,
|
|
|
|
).order_by("id")
|
|
|
|
return [um.message for um in filter_by_topic_name_via_message(query, topic_name)]
|
2023-11-26 22:42:43 +01:00
|
|
|
|
|
|
|
|
|
|
|
@skipUnless(settings.ZILENCER_ENABLED, "requires zilencer")
|
|
|
|
class BouncerTestCase(ZulipTestCase):
|
|
|
|
@override
|
|
|
|
def setUp(self) -> None:
|
2023-12-04 19:42:14 +01:00
|
|
|
# Set a deterministic uuid and a nice hostname for convenience.
|
2023-11-26 22:42:43 +01:00
|
|
|
self.server_uuid = "6cde5f7a-1f7e-4978-9716-49f69ebfc9fe"
|
2023-12-01 18:06:22 +01:00
|
|
|
self.server = RemoteZulipServer.objects.all().latest("id")
|
|
|
|
|
|
|
|
self.server.uuid = self.server_uuid
|
2023-12-04 19:42:14 +01:00
|
|
|
self.server.hostname = "demo.example.com"
|
2023-11-26 22:42:43 +01:00
|
|
|
self.server.save()
|
2023-12-01 18:06:22 +01:00
|
|
|
|
2023-11-26 22:42:43 +01:00
|
|
|
super().setUp()
|
|
|
|
|
|
|
|
@override
|
|
|
|
def tearDown(self) -> None:
|
|
|
|
RemoteZulipServer.objects.filter(uuid=self.server_uuid).delete()
|
|
|
|
super().tearDown()
|
|
|
|
|
|
|
|
def request_callback(self, request: PreparedRequest) -> Tuple[int, ResponseHeaders, bytes]:
|
|
|
|
kwargs = {}
|
|
|
|
if isinstance(request.body, bytes):
|
|
|
|
# send_json_to_push_bouncer sends the body as bytes containing json.
|
|
|
|
data = orjson.loads(request.body)
|
|
|
|
kwargs = dict(content_type="application/json")
|
|
|
|
else:
|
|
|
|
assert isinstance(request.body, str) or request.body is None
|
2023-12-05 21:14:17 +01:00
|
|
|
params: Dict[str, List[str]] = parse_qs(request.body)
|
2023-11-26 22:42:43 +01:00
|
|
|
# In Python 3, the values of the dict from `parse_qs` are
|
|
|
|
# in a list, because there might be multiple values.
|
|
|
|
# Since we are not sending any duplicate keys,
|
|
|
|
# we can safely pick the first value.
|
|
|
|
data = {k: v[0] for k, v in params.items()}
|
|
|
|
assert request.url is not None # allow mypy to infer url is present.
|
|
|
|
assert settings.PUSH_NOTIFICATION_BOUNCER_URL is not None
|
|
|
|
local_url = request.url.replace(settings.PUSH_NOTIFICATION_BOUNCER_URL, "")
|
|
|
|
if request.method == "POST":
|
|
|
|
result = self.uuid_post(self.server_uuid, local_url, data, subdomain="", **kwargs)
|
|
|
|
elif request.method == "GET":
|
|
|
|
result = self.uuid_get(self.server_uuid, local_url, data, subdomain="")
|
|
|
|
return (result.status_code, result.headers, result.content)
|
|
|
|
|
|
|
|
def add_mock_response(self) -> None:
|
|
|
|
# Match any endpoint with the PUSH_NOTIFICATION_BOUNCER_URL.
|
|
|
|
assert settings.PUSH_NOTIFICATION_BOUNCER_URL is not None
|
2024-04-26 20:30:22 +02:00
|
|
|
COMPILED_URL = re.compile(settings.PUSH_NOTIFICATION_BOUNCER_URL + r".*")
|
2023-11-26 22:42:43 +01:00
|
|
|
responses.add_callback(responses.POST, COMPILED_URL, callback=self.request_callback)
|
|
|
|
responses.add_callback(responses.GET, COMPILED_URL, callback=self.request_callback)
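# Typical usage in a test body (sketch; relies on the responses library's
# @responses.activate decorator, as the bouncer tests already do):
#     @responses.activate
#     def test_register_token(self) -> None:
#         self.add_mock_response()
#         ...  # code that talks to PUSH_NOTIFICATION_BOUNCER_URL is now
#              # routed back into this test server via request_callback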
|
|
|
|
|
|
|
|
def get_generic_payload(self, method: str = "register") -> Dict[str, Any]:
|
|
|
|
user_id = 10
|
|
|
|
token = "111222"
|
|
|
|
token_kind = PushDeviceToken.GCM
|
|
|
|
|
|
|
|
return {"user_id": user_id, "token": token, "token_kind": token_kind}
|