2017-11-16 00:55:49 +01:00
|
|
|
import itertools
|
|
|
|
import os
|
|
|
|
import random
|
2020-11-13 22:31:41 +01:00
|
|
|
from collections import defaultdict
|
2024-07-12 02:30:25 +02:00
|
|
|
from collections.abc import Mapping, Sequence
|
2023-03-04 01:50:20 +01:00
|
|
|
from datetime import datetime, timedelta
|
2024-07-12 02:30:25 +02:00
|
|
|
from typing import Any
|
2017-11-16 00:55:49 +01:00
|
|
|
|
2020-05-09 22:23:33 +02:00
|
|
|
import bmemcached
|
2020-08-07 01:09:47 +02:00
|
|
|
import orjson
|
2017-11-16 00:55:49 +01:00
|
|
|
from django.conf import settings
|
2020-01-14 03:03:14 +01:00
|
|
|
from django.contrib.sessions.models import Session
|
2021-07-26 16:46:53 +02:00
|
|
|
from django.core.files.base import File
|
2018-07-09 09:14:00 +02:00
|
|
|
from django.core.management import call_command
|
2024-05-24 16:49:56 +02:00
|
|
|
from django.core.management.base import CommandParser
|
2024-01-24 05:27:43 +01:00
|
|
|
from django.core.validators import validate_email
|
2021-02-04 18:15:38 +01:00
|
|
|
from django.db import connection
|
2020-12-29 05:18:25 +01:00
|
|
|
from django.db.models import F
|
2023-02-17 18:29:21 +01:00
|
|
|
from django.db.models.signals import post_delete
|
2017-11-16 00:55:49 +01:00
|
|
|
from django.utils.timezone import now as timezone_now
|
2023-10-12 19:43:45 +02:00
|
|
|
from typing_extensions import override
|
2012-08-28 18:45:10 +02:00
|
|
|
|
2020-06-11 00:54:34 +02:00
|
|
|
from scripts.lib.zulip_tools import get_or_create_dev_uuid_var_path
|
2022-04-14 23:58:15 +02:00
|
|
|
from zerver.actions.create_realm import do_create_realm
|
2022-04-14 23:46:56 +02:00
|
|
|
from zerver.actions.custom_profile_fields import (
|
|
|
|
do_update_user_custom_profile_data_if_changed,
|
|
|
|
try_add_realm_custom_profile_field,
|
|
|
|
try_add_realm_default_custom_profile_field,
|
|
|
|
)
|
2022-04-14 23:50:10 +02:00
|
|
|
from zerver.actions.message_send import build_message_send_dict, do_send_messages
|
2022-04-14 23:40:49 +02:00
|
|
|
from zerver.actions.realm_emoji import check_add_realm_emoji
|
2023-08-11 01:10:21 +02:00
|
|
|
from zerver.actions.realm_linkifiers import do_add_linkifier
|
2023-04-28 13:35:00 +02:00
|
|
|
from zerver.actions.scheduled_messages import check_schedule_message
|
2022-05-14 13:07:14 +02:00
|
|
|
from zerver.actions.streams import bulk_add_subscriptions
|
2022-12-14 06:53:21 +01:00
|
|
|
from zerver.actions.user_groups import create_user_group_in_database
|
2023-06-17 17:37:04 +02:00
|
|
|
from zerver.actions.user_settings import do_change_user_setting
|
2022-04-14 23:48:28 +02:00
|
|
|
from zerver.actions.users import do_change_user_role
|
2020-11-13 22:31:41 +01:00
|
|
|
from zerver.lib.bulk_create import bulk_create_streams
|
2020-05-05 07:55:47 +02:00
|
|
|
from zerver.lib.generate_test_data import create_test_data, generate_topics
|
2024-05-24 16:49:56 +02:00
|
|
|
from zerver.lib.management import ZulipBaseCommand
|
2018-05-23 05:46:30 +02:00
|
|
|
from zerver.lib.onboarding import create_if_missing_realm_internal_bots
|
2018-11-27 18:49:35 +01:00
|
|
|
from zerver.lib.push_notifications import logger as push_notifications_logger
|
2023-12-01 18:06:22 +01:00
|
|
|
from zerver.lib.remote_server import get_realms_info_for_push_bouncer
|
2020-01-27 16:12:43 +01:00
|
|
|
from zerver.lib.server_initialization import create_internal_realm, create_users
|
2019-07-17 02:29:08 +02:00
|
|
|
from zerver.lib.storage import static_path
|
2022-03-12 13:46:20 +01:00
|
|
|
from zerver.lib.stream_color import STREAM_ASSIGNMENT_COLORS
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.types import ProfileFieldData
|
|
|
|
from zerver.lib.users import add_service
|
2018-08-01 11:18:37 +02:00
|
|
|
from zerver.lib.utils import generate_api_key
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.models import (
|
2020-07-09 15:48:10 +02:00
|
|
|
AlertWord,
|
2020-06-11 00:54:34 +02:00
|
|
|
Client,
|
|
|
|
CustomProfileField,
|
|
|
|
DefaultStream,
|
2024-07-05 13:13:40 +02:00
|
|
|
DirectMessageGroup,
|
2021-07-30 19:11:13 +02:00
|
|
|
Draft,
|
2020-06-11 00:54:34 +02:00
|
|
|
Message,
|
2020-11-13 22:31:41 +01:00
|
|
|
Reaction,
|
2020-06-11 00:54:34 +02:00
|
|
|
Realm,
|
|
|
|
RealmAuditLog,
|
|
|
|
RealmDomain,
|
2021-12-09 08:04:00 +01:00
|
|
|
RealmUserDefault,
|
2020-06-11 00:54:34 +02:00
|
|
|
Recipient,
|
|
|
|
Service,
|
|
|
|
Stream,
|
|
|
|
Subscription,
|
|
|
|
UserMessage,
|
|
|
|
UserPresence,
|
|
|
|
UserProfile,
|
|
|
|
)
|
2023-12-15 21:04:40 +01:00
|
|
|
from zerver.models.alert_words import flush_alert_word
|
2023-12-15 04:33:19 +01:00
|
|
|
from zerver.models.clients import get_client
|
2024-05-22 11:43:10 +02:00
|
|
|
from zerver.models.realms import WildcardMentionPolicyEnum, get_realm
|
2024-07-04 14:05:48 +02:00
|
|
|
from zerver.models.recipients import get_or_create_direct_message_group
|
2023-12-15 03:57:04 +01:00
|
|
|
from zerver.models.streams import get_stream
|
2023-12-15 01:16:00 +01:00
|
|
|
from zerver.models.users import get_user, get_user_by_delivery_email, get_user_profile_by_id
|
2023-12-28 19:28:36 +01:00
|
|
|
from zilencer.models import RemoteRealm, RemoteZulipServer, RemoteZulipServerAuditLog
|
2023-12-01 18:06:22 +01:00
|
|
|
from zilencer.views import update_remote_realm_data_for_server
|
2019-06-09 20:27:12 +02:00
|
|
|
|
2024-04-18 18:17:04 +02:00
|
|
|
# Disable the push notifications bouncer to avoid enqueuing updates in
# maybe_enqueue_audit_log_upload during early setup.
settings.PUSH_NOTIFICATION_BOUNCER_URL = None
# NOTE(review): presumably populate_db runs outside the Tornado real-time
# push server, so Tornado-specific code paths are disabled — confirm.
settings.USING_TORNADO = False
# Disable using memcached caches to avoid 'unsupported pickle
# protocol' errors if `populate_db` is run with a different Python
# from `run-dev`.
# Keep a handle on the real cache config; clear_database() inspects it
# to decide whether memcached needs flushing.
default_cache = settings.CACHES["default"]
settings.CACHES["default"] = {
    "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
}
|
2012-10-12 21:27:19 +02:00
|
|
|
|
2020-11-13 22:31:41 +01:00
|
|
|
# (emoji name, Unicode codepoint) pairs — presumably used when generating
# test reactions/emoji data; confirm against callers outside this chunk.
DEFAULT_EMOJIS = [
    ("+1", "1f44d"),
    ("smiley", "1f603"),
    ("eyes", "1f440"),
    ("crying_cat_face", "1f63f"),
    ("arrow_up", "2b06"),
    ("confetti_ball", "1f38a"),
    ("hundred_points", "1f4af"),
]
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-01-14 03:03:14 +01:00
|
|
|
def clear_database() -> None:
    # Hacky function only for use inside populate_db. Designed to
    # allow running populate_db repeatedly in series to work without
    # flushing memcached or clearing the database manually.

    # With `zproject.test_settings`, we aren't using real memcached
    # and we only need to flush memcached if we're populating a
    # database that would be used with it (i.e. zproject.dev_settings).
    if default_cache["BACKEND"] == "zerver.lib.singleton_bmemcached.SingletonBMemcached":
        memcached_client = bmemcached.Client(
            (default_cache["LOCATION"],),
            **default_cache["OPTIONS"],
        )
        try:
            memcached_client.flush_all()
        finally:
            memcached_client.disconnect_all()

    # The after-delete signal on this just updates caches, and slows
    # down the deletion noticeably. Remove the signal and replace it
    # after we're done.
    post_delete.disconnect(flush_alert_word, sender=AlertWord)
    try:
        # Annotated Any because mypy doesn't know these are model classes.
        models_to_clear: list[Any] = [
            Message,
            Stream,
            AlertWord,
            UserProfile,
            Recipient,
            Realm,
            Subscription,
            DirectMessageGroup,
            UserMessage,
            Client,
            DefaultStream,
            RemoteRealm,
            RemoteZulipServer,
        ]
        for model in models_to_clear:
            model.objects.all().delete()
        Session.objects.all().delete()
    finally:
        # Reconnect the cache-flushing signal even if a deletion above
        # raised, so later runs don't silently skip cache invalidation.
        post_delete.connect(flush_alert_word, sender=AlertWord)
|
2020-01-14 03:03:14 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
def subscribe_users_to_streams(realm: Realm, stream_dict: dict[str, dict[str, Any]]) -> None:
    """Subscribe every user in ``realm`` to each stream named in
    ``stream_dict``, recording one RealmAuditLog row per subscription."""
    event_time = timezone_now()
    new_subscriptions = []
    audit_log_rows = []
    realm_profiles = UserProfile.objects.select_related("realm").filter(realm=realm)
    for stream_index, stream_name in enumerate(stream_dict):
        stream = Stream.objects.get(name=stream_name, realm=realm)
        recipient = Recipient.objects.get(type=Recipient.STREAM, type_id=stream.id)
        # Rotate through the fixed palette so each stream gets a color.
        color = STREAM_ASSIGNMENT_COLORS[stream_index % len(STREAM_ASSIGNMENT_COLORS)]
        for profile in realm_profiles:
            # Subscribe this user to the stream.
            new_subscriptions.append(
                Subscription(
                    recipient=recipient,
                    user_profile=profile,
                    is_user_active=profile.is_active,
                    color=color,
                )
            )
            audit_log_rows.append(
                RealmAuditLog(
                    realm=profile.realm,
                    modified_user=profile,
                    modified_stream=stream,
                    event_last_message_id=0,
                    event_type=RealmAuditLog.SUBSCRIPTION_CREATED,
                    event_time=event_time,
                )
            )
    Subscription.objects.bulk_create(new_subscriptions)
    RealmAuditLog.objects.bulk_create(audit_log_rows)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-07-09 15:48:10 +02:00
|
|
|
def create_alert_words(realm_id: int) -> None:
    """Give each active human user in the realm four random alert words."""
    active_human_ids = UserProfile.objects.filter(
        realm_id=realm_id,
        is_bot=False,
        is_active=True,
    ).values_list("id", flat=True)

    # NOTE: the initial ordering of this list matters for reproducibility,
    # since random.shuffle permutes it in place below.
    candidate_words = [
        "algorithms",
        "complexity",
        "founded",
        "galaxy",
        "grammar",
        "illustrious",
        "natural",
        "objective",
        "people",
        "robotics",
        "study",
    ]

    rows: list[AlertWord] = []
    for user_id in active_human_ids:
        # Shuffle, then take a prefix, so each user ends up with a
        # different random subset of four words.
        random.shuffle(candidate_words)
        for word in candidate_words[:4]:
            rows.append(AlertWord(realm_id=realm_id, user_profile_id=user_id, word=word))

    AlertWord.objects.bulk_create(rows)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-05-24 16:49:56 +02:00
|
|
|
class Command(ZulipBaseCommand):
    # Management command that fills the development/test database with
    # realms, users, bots, streams, and messages.
    help = "Populate a test database"
|
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
    @override
    def add_arguments(self, parser: CommandParser) -> None:
        # Message volume and batching knobs.
        parser.add_argument(
            "-n", "--num-messages", type=int, default=1000, help="The number of messages to create."
        )

        parser.add_argument(
            "-o",
            "--oldest-message-days",
            type=int,
            default=5,
            help="The start of the time range where messages could have been sent.",
        )

        parser.add_argument(
            "-b",
            "--batch-size",
            type=int,
            default=1000,
            help="How many messages to process in a single batch",
        )

        # Extra entities beyond the fixed defaults.
        parser.add_argument(
            "--extra-users", type=int, default=0, help="The number of extra users to create"
        )

        parser.add_argument(
            "--extra-bots", type=int, default=0, help="The number of extra bots to create"
        )

        parser.add_argument(
            "--extra-streams", type=int, default=0, help="The number of extra streams to create"
        )

        parser.add_argument("--max-topics", type=int, help="The number of maximum topics to create")

        # Direct-message traffic shape: group DMs ("huddles") and 1:1s.
        parser.add_argument(
            "--huddles",
            dest="num_huddles",
            type=int,
            default=3,
            help="The number of huddles to create.",
        )

        parser.add_argument(
            "--personals",
            dest="num_personals",
            type=int,
            default=6,
            help="The number of personal pairs to create.",
        )

        parser.add_argument("--threads", type=int, default=1, help="The number of threads to use.")

        parser.add_argument(
            "--percent-huddles",
            type=float,
            default=15,
            help="The percent of messages to be huddles.",
        )

        parser.add_argument(
            "--percent-personals",
            type=float,
            default=15,
            help="The percent of messages to be personals.",
        )

        parser.add_argument(
            "--stickiness",
            type=float,
            default=20,
            help="The percent of messages to repeat recent folks.",
        )

        # --nodelete stores False into options["delete"]: by default the
        # command wipes existing data first.
        parser.add_argument(
            "--nodelete",
            action="store_false",
            dest="delete",
            help="Whether to delete all the existing messages.",
        )

        parser.add_argument(
            "--test-suite",
            action="store_true",
            help="Configures populate_db to create a deterministic "
            "data set for the backend tests.",
        )
|
2016-11-03 10:22:19 +01:00
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
@override
|
2021-07-26 17:32:10 +02:00
|
|
|
def handle(self, *args: Any, **options: Any) -> None:
|
2021-07-26 18:59:34 +02:00
|
|
|
# Suppress spammy output from the push notifications logger
|
|
|
|
push_notifications_logger.disabled = True
|
|
|
|
|
2012-09-05 18:38:35 +02:00
|
|
|
if options["percent_huddles"] + options["percent_personals"] > 100:
|
|
|
|
self.stderr.write("Error! More than 100% of messages allocated.\n")
|
|
|
|
return
|
|
|
|
|
2018-05-19 03:26:34 +02:00
|
|
|
# Get consistent data for backend tests.
|
|
|
|
if options["test_suite"]:
|
|
|
|
random.seed(0)
|
|
|
|
|
2021-02-04 18:15:38 +01:00
|
|
|
with connection.cursor() as cursor:
|
|
|
|
# Sometimes bugs relating to confusing recipient.id for recipient.type_id
|
|
|
|
# or <object>.id for <object>.recipient_id remain undiscovered by the test suite
|
|
|
|
# due to these numbers happening to coincide in such a way that it makes tests
|
|
|
|
# accidentally pass. By bumping the Recipient.id sequence by a large enough number,
|
|
|
|
# we can have those ids in a completely different range of values than object ids,
|
2022-02-08 00:13:33 +01:00
|
|
|
# eliminating the possibility of such coincidences.
|
2021-02-04 18:15:38 +01:00
|
|
|
cursor.execute("SELECT setval('zerver_recipient_id_seq', 100)")
|
|
|
|
|
2020-05-01 19:30:05 +02:00
|
|
|
if options["max_topics"] is None:
|
2022-06-08 21:06:51 +02:00
|
|
|
# If max_topics is not set, we use a default that's big
|
2024-05-17 09:42:44 +02:00
|
|
|
# enough "show all topics" should appear, and scales slowly
|
2022-06-08 21:06:51 +02:00
|
|
|
# with the number of messages.
|
|
|
|
options["max_topics"] = 8 + options["num_messages"] // 1000
|
2020-05-01 19:30:05 +02:00
|
|
|
|
2012-09-10 20:38:29 +02:00
|
|
|
if options["delete"]:
|
2012-10-29 19:43:00 +01:00
|
|
|
# Start by clearing all the data in our database
|
|
|
|
clear_database()
|
2012-09-05 21:49:56 +02:00
|
|
|
|
2019-07-24 08:34:15 +02:00
|
|
|
# Create our three default realms
|
2022-04-14 23:58:15 +02:00
|
|
|
# Could in theory be done via zerver.actions.create_realm.do_create_realm, but
|
2016-09-28 00:03:13 +02:00
|
|
|
# welcome-bot (needed for do_create_realm) hasn't been created yet
|
2019-08-20 01:53:51 +02:00
|
|
|
create_internal_realm()
|
2021-04-29 17:22:48 +02:00
|
|
|
zulip_realm = do_create_realm(
|
2021-02-12 08:19:30 +01:00
|
|
|
string_id="zulip",
|
|
|
|
name="Zulip Dev",
|
|
|
|
emails_restricted_to_domains=False,
|
2017-11-04 14:16:50 +01:00
|
|
|
description="The Zulip development environment default organization."
|
2021-02-12 08:19:30 +01:00
|
|
|
" It's great for testing!",
|
|
|
|
invite_required=False,
|
2021-10-18 23:28:17 +02:00
|
|
|
plan_type=Realm.PLAN_TYPE_SELF_HOSTED,
|
2021-06-24 20:05:06 +02:00
|
|
|
org_type=Realm.ORG_TYPES["business"]["id"],
|
2021-06-16 21:15:47 +02:00
|
|
|
enable_read_receipts=True,
|
2021-10-06 14:11:48 +02:00
|
|
|
enable_spectator_access=True,
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2017-03-31 16:20:07 +02:00
|
|
|
RealmDomain.objects.create(realm=zulip_realm, domain="zulip.com")
|
2024-02-07 12:13:02 +01:00
|
|
|
assert zulip_realm.new_stream_announcements_stream is not None
|
|
|
|
zulip_realm.new_stream_announcements_stream.name = "Verona"
|
|
|
|
zulip_realm.new_stream_announcements_stream.description = "A city in Italy"
|
|
|
|
zulip_realm.new_stream_announcements_stream.save(update_fields=["name", "description"])
|
2021-04-29 17:22:48 +02:00
|
|
|
|
2021-12-09 08:04:00 +01:00
|
|
|
realm_user_default = RealmUserDefault.objects.get(realm=zulip_realm)
|
|
|
|
realm_user_default.enter_sends = True
|
2021-10-26 09:15:16 +02:00
|
|
|
realm_user_default.email_address_visibility = (
|
|
|
|
RealmUserDefault.EMAIL_ADDRESS_VISIBILITY_ADMINS
|
|
|
|
)
|
2021-12-09 08:04:00 +01:00
|
|
|
realm_user_default.save()
|
|
|
|
|
2015-08-20 03:54:07 +02:00
|
|
|
if options["test_suite"]:
|
2021-04-27 16:56:45 +02:00
|
|
|
mit_realm = do_create_realm(
|
2021-02-12 08:19:30 +01:00
|
|
|
string_id="zephyr",
|
|
|
|
name="MIT",
|
|
|
|
emails_restricted_to_domains=True,
|
|
|
|
invite_required=False,
|
2021-10-18 23:28:17 +02:00
|
|
|
plan_type=Realm.PLAN_TYPE_SELF_HOSTED,
|
2021-06-24 20:05:06 +02:00
|
|
|
org_type=Realm.ORG_TYPES["business"]["id"],
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2017-03-31 16:20:07 +02:00
|
|
|
RealmDomain.objects.create(realm=mit_realm, domain="mit.edu")
|
2012-09-05 17:23:58 +02:00
|
|
|
|
2021-04-27 16:57:36 +02:00
|
|
|
lear_realm = do_create_realm(
|
2021-02-12 08:19:30 +01:00
|
|
|
string_id="lear",
|
|
|
|
name="Lear & Co.",
|
|
|
|
emails_restricted_to_domains=False,
|
|
|
|
invite_required=False,
|
2021-10-18 23:28:17 +02:00
|
|
|
plan_type=Realm.PLAN_TYPE_SELF_HOSTED,
|
2021-06-24 20:05:06 +02:00
|
|
|
org_type=Realm.ORG_TYPES["business"]["id"],
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2017-11-28 02:42:11 +01:00
|
|
|
|
2020-09-17 14:50:31 +02:00
|
|
|
# Default to allowing all members to send mentions in
|
|
|
|
# large streams for the test suite to keep
|
|
|
|
# mention-related tests simple.
|
2024-05-22 11:43:10 +02:00
|
|
|
zulip_realm.wildcard_mention_policy = WildcardMentionPolicyEnum.MEMBERS
|
2021-02-12 08:20:45 +01:00
|
|
|
zulip_realm.save(update_fields=["wildcard_mention_policy"])
|
2020-09-17 14:50:31 +02:00
|
|
|
|
2023-12-01 18:06:22 +01:00
|
|
|
# Realms should have matching RemoteRealm entries - simulating having realms registered
|
|
|
|
# with the bouncer, which is going to be the primary case for modern servers. Tests
|
|
|
|
# wanting to have missing registrations, or simulating legacy server scenarios,
|
|
|
|
# should delete RemoteRealms to explicit set things up.
|
2023-12-04 19:42:14 +01:00
|
|
|
|
|
|
|
assert isinstance(settings.ZULIP_ORG_ID, str)
|
|
|
|
assert isinstance(settings.ZULIP_ORG_KEY, str)
|
2023-12-01 18:06:22 +01:00
|
|
|
server = RemoteZulipServer.objects.create(
|
2023-12-04 19:42:14 +01:00
|
|
|
uuid=settings.ZULIP_ORG_ID,
|
|
|
|
api_key=settings.ZULIP_ORG_KEY,
|
|
|
|
hostname=settings.EXTERNAL_HOST,
|
2023-12-01 18:06:22 +01:00
|
|
|
last_updated=timezone_now(),
|
2023-12-06 18:40:08 +01:00
|
|
|
contact_email="remotezulipserver@zulip.com",
|
2023-12-01 18:06:22 +01:00
|
|
|
)
|
2023-12-28 19:28:36 +01:00
|
|
|
RemoteZulipServerAuditLog.objects.create(
|
|
|
|
event_type=RemoteZulipServerAuditLog.REMOTE_SERVER_CREATED,
|
|
|
|
server=server,
|
|
|
|
event_time=server.last_updated,
|
|
|
|
)
|
2023-12-01 18:06:22 +01:00
|
|
|
update_remote_realm_data_for_server(server, get_realms_info_for_push_bouncer())
|
|
|
|
|
2012-09-10 20:38:29 +02:00
|
|
|
# Create test Users (UserProfiles are automatically created,
|
|
|
|
# as are subscriptions to the ability to receive personals).
|
2016-11-18 21:46:47 +01:00
|
|
|
names = [
|
|
|
|
("Zoe", "ZOE@zulip.com"),
|
|
|
|
("Othello, the Moor of Venice", "othello@zulip.com"),
|
|
|
|
("Iago", "iago@zulip.com"),
|
|
|
|
("Prospero from The Tempest", "prospero@zulip.com"),
|
2021-04-11 16:26:54 +02:00
|
|
|
("Cordelia, Lear's daughter", "cordelia@zulip.com"),
|
2016-11-18 21:46:47 +01:00
|
|
|
("King Hamlet", "hamlet@zulip.com"),
|
|
|
|
("aaron", "AARON@zulip.com"),
|
2018-04-20 22:12:02 +02:00
|
|
|
("Polonius", "polonius@zulip.com"),
|
2020-05-17 18:46:14 +02:00
|
|
|
("Desdemona", "desdemona@zulip.com"),
|
2020-12-22 15:46:00 +01:00
|
|
|
("शिव", "shiva@zulip.com"),
|
2016-11-18 21:46:47 +01:00
|
|
|
]
|
2019-12-24 12:07:55 +01:00
|
|
|
|
|
|
|
# For testing really large batches:
|
|
|
|
# Create extra users with semi realistic names to make search
|
|
|
|
# functions somewhat realistic. We'll still create 1000 users
|
2022-02-08 00:13:33 +01:00
|
|
|
# like Extra222 User for some predictability.
|
2021-02-12 08:20:45 +01:00
|
|
|
num_names = options["extra_users"]
|
2020-05-30 16:17:51 +02:00
|
|
|
num_boring_names = 300
|
2019-12-24 12:07:55 +01:00
|
|
|
|
|
|
|
for i in range(min(num_names, num_boring_names)):
|
2021-02-12 08:20:45 +01:00
|
|
|
full_name = f"Extra{i:03} User"
|
|
|
|
names.append((full_name, f"extrauser{i}@zulip.com"))
|
2019-12-24 12:07:55 +01:00
|
|
|
|
|
|
|
if num_names > num_boring_names:
|
2021-02-12 08:19:30 +01:00
|
|
|
fnames = [
|
2021-02-12 08:20:45 +01:00
|
|
|
"Amber",
|
|
|
|
"Arpita",
|
|
|
|
"Bob",
|
|
|
|
"Cindy",
|
|
|
|
"Daniela",
|
|
|
|
"Dan",
|
|
|
|
"Dinesh",
|
|
|
|
"Faye",
|
|
|
|
"François",
|
|
|
|
"George",
|
|
|
|
"Hank",
|
|
|
|
"Irene",
|
|
|
|
"James",
|
|
|
|
"Janice",
|
|
|
|
"Jenny",
|
|
|
|
"Jill",
|
|
|
|
"John",
|
|
|
|
"Kate",
|
|
|
|
"Katelyn",
|
|
|
|
"Kobe",
|
|
|
|
"Lexi",
|
|
|
|
"Manish",
|
|
|
|
"Mark",
|
|
|
|
"Matt",
|
|
|
|
"Mayna",
|
|
|
|
"Michael",
|
|
|
|
"Pete",
|
|
|
|
"Peter",
|
|
|
|
"Phil",
|
|
|
|
"Phillipa",
|
|
|
|
"Preston",
|
|
|
|
"Sally",
|
|
|
|
"Scott",
|
|
|
|
"Sandra",
|
|
|
|
"Steve",
|
|
|
|
"Stephanie",
|
|
|
|
"Vera",
|
2021-02-12 08:19:30 +01:00
|
|
|
]
|
2021-02-12 08:20:45 +01:00
|
|
|
mnames = ["de", "van", "von", "Shaw", "T."]
|
2021-02-12 08:19:30 +01:00
|
|
|
lnames = [
|
2021-02-12 08:20:45 +01:00
|
|
|
"Adams",
|
|
|
|
"Agarwal",
|
|
|
|
"Beal",
|
|
|
|
"Benson",
|
|
|
|
"Bonita",
|
|
|
|
"Davis",
|
|
|
|
"George",
|
|
|
|
"Harden",
|
|
|
|
"James",
|
|
|
|
"Jones",
|
|
|
|
"Johnson",
|
|
|
|
"Jordan",
|
|
|
|
"Lee",
|
|
|
|
"Leonard",
|
|
|
|
"Singh",
|
|
|
|
"Smith",
|
|
|
|
"Patel",
|
|
|
|
"Towns",
|
|
|
|
"Wall",
|
2021-02-12 08:19:30 +01:00
|
|
|
]
|
2021-03-31 15:52:53 +02:00
|
|
|
non_ascii_names = [
|
|
|
|
"Günter",
|
|
|
|
"أحمد",
|
|
|
|
"Magnús",
|
|
|
|
"आशी",
|
|
|
|
"イツキ",
|
|
|
|
"语嫣",
|
|
|
|
"அருண்",
|
|
|
|
"Александр",
|
|
|
|
"José",
|
|
|
|
]
|
|
|
|
# to imitate emoji insertions in usernames
|
|
|
|
raw_emojis = ["😎", "😂", "🐱👤"]
|
2019-12-24 12:07:55 +01:00
|
|
|
|
|
|
|
for i in range(num_boring_names, num_names):
|
|
|
|
fname = random.choice(fnames) + str(i)
|
|
|
|
full_name = fname
|
|
|
|
if random.random() < 0.7:
|
2021-03-31 15:52:53 +02:00
|
|
|
if random.random() < 0.3:
|
|
|
|
full_name += " " + random.choice(non_ascii_names)
|
|
|
|
else:
|
2021-02-12 08:20:45 +01:00
|
|
|
full_name += " " + random.choice(mnames)
|
2021-03-31 15:52:53 +02:00
|
|
|
if random.random() < 0.1:
|
2021-08-02 23:36:06 +02:00
|
|
|
full_name += f" {random.choice(raw_emojis)} "
|
2021-03-31 15:52:53 +02:00
|
|
|
else:
|
|
|
|
full_name += " " + random.choice(lnames)
|
2024-01-24 05:27:43 +01:00
|
|
|
email = fname.lower().encode("ascii", "ignore").decode("ascii") + "@zulip.com"
|
|
|
|
validate_email(email)
|
2019-12-24 12:07:55 +01:00
|
|
|
names.append((full_name, email))
|
|
|
|
|
2021-12-07 02:23:24 +01:00
|
|
|
create_users(zulip_realm, names, tos_version=settings.TERMS_OF_SERVICE_VERSION)
|
2017-08-15 21:54:50 +02:00
|
|
|
|
2022-02-19 00:45:14 +01:00
|
|
|
# Add time zones to some users. Ideally, this would be
|
|
|
|
# done in the initial create_users calls, but the
|
|
|
|
# tuple-based interface for that function doesn't support
|
|
|
|
# doing so.
|
|
|
|
def assign_time_zone_by_delivery_email(delivery_email: str, new_time_zone: str) -> None:
|
|
|
|
u = get_user_by_delivery_email(delivery_email, zulip_realm)
|
|
|
|
u.timezone = new_time_zone
|
|
|
|
u.save(update_fields=["timezone"])
|
|
|
|
|
2022-02-24 21:15:43 +01:00
|
|
|
# Note: Hamlet keeps default time zone of "".
|
2022-02-19 00:45:14 +01:00
|
|
|
assign_time_zone_by_delivery_email("AARON@zulip.com", "US/Pacific")
|
|
|
|
assign_time_zone_by_delivery_email("othello@zulip.com", "US/Pacific")
|
|
|
|
assign_time_zone_by_delivery_email("ZOE@zulip.com", "US/Eastern")
|
|
|
|
assign_time_zone_by_delivery_email("iago@zulip.com", "US/Eastern")
|
|
|
|
assign_time_zone_by_delivery_email("desdemona@zulip.com", "Canada/Newfoundland")
|
|
|
|
assign_time_zone_by_delivery_email("polonius@zulip.com", "Asia/Shanghai") # China
|
|
|
|
assign_time_zone_by_delivery_email("shiva@zulip.com", "Asia/Kolkata") # India
|
|
|
|
assign_time_zone_by_delivery_email("cordelia@zulip.com", "UTC")
|
|
|
|
|
2020-03-12 13:51:54 +01:00
|
|
|
iago = get_user_by_delivery_email("iago@zulip.com", zulip_realm)
|
2020-06-29 15:07:10 +02:00
|
|
|
do_change_user_role(iago, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
|
2017-03-28 00:37:08 +02:00
|
|
|
iago.is_staff = True
|
2021-02-12 08:20:45 +01:00
|
|
|
iago.save(update_fields=["is_staff"])
|
2017-10-12 08:16:05 +02:00
|
|
|
|
2021-07-30 19:11:13 +02:00
|
|
|
# We need to create at least two test draft for Iago for the sake
|
|
|
|
# of the cURL tests. Two since one will be deleted.
|
|
|
|
Draft.objects.create(
|
|
|
|
user_profile=iago,
|
|
|
|
recipient=None,
|
|
|
|
topic="Release Notes",
|
|
|
|
content="Release 4.0 will contain ...",
|
2021-09-11 01:33:48 +02:00
|
|
|
last_edit_time=timezone_now(),
|
2021-07-30 19:11:13 +02:00
|
|
|
)
|
|
|
|
Draft.objects.create(
|
|
|
|
user_profile=iago,
|
|
|
|
recipient=None,
|
|
|
|
topic="Release Notes",
|
|
|
|
content="Release 4.0 will contain many new features such as ... ",
|
2021-09-11 01:33:48 +02:00
|
|
|
last_edit_time=timezone_now(),
|
2021-07-30 19:11:13 +02:00
|
|
|
)
|
|
|
|
|
2020-05-17 18:46:14 +02:00
|
|
|
desdemona = get_user_by_delivery_email("desdemona@zulip.com", zulip_realm)
|
2020-06-29 15:07:10 +02:00
|
|
|
do_change_user_role(desdemona, UserProfile.ROLE_REALM_OWNER, acting_user=None)
|
2020-05-17 18:46:14 +02:00
|
|
|
|
2020-12-22 15:46:00 +01:00
|
|
|
shiva = get_user_by_delivery_email("shiva@zulip.com", zulip_realm)
|
|
|
|
do_change_user_role(shiva, UserProfile.ROLE_MODERATOR, acting_user=None)
|
|
|
|
|
2021-08-11 20:36:52 +02:00
|
|
|
polonius = get_user_by_delivery_email("polonius@zulip.com", zulip_realm)
|
|
|
|
do_change_user_role(polonius, UserProfile.ROLE_GUEST, acting_user=None)
|
2018-04-20 22:12:02 +02:00
|
|
|
|
2017-10-12 08:16:05 +02:00
|
|
|
# These bots are directly referenced from code and thus
|
|
|
|
# are needed for the test suite.
|
|
|
|
zulip_realm_bots = [
|
|
|
|
("Zulip Default Bot", "default-bot@zulip.com"),
|
2023-07-31 22:52:35 +02:00
|
|
|
*(
|
|
|
|
(f"Extra Bot {i}", f"extrabot{i}@zulip.com")
|
|
|
|
for i in range(options["extra_bots"])
|
|
|
|
),
|
2017-10-12 08:16:05 +02:00
|
|
|
]
|
2019-07-24 08:34:15 +02:00
|
|
|
|
2022-02-16 17:37:00 +01:00
|
|
|
create_users(
|
|
|
|
zulip_realm, zulip_realm_bots, bot_type=UserProfile.DEFAULT_BOT, bot_owner=desdemona
|
|
|
|
)
|
2017-10-12 08:16:05 +02:00
|
|
|
|
2020-03-12 13:51:54 +01:00
|
|
|
zoe = get_user_by_delivery_email("zoe@zulip.com", zulip_realm)
|
2017-10-12 08:16:05 +02:00
|
|
|
zulip_webhook_bots = [
|
|
|
|
("Zulip Webhook Bot", "webhook-bot@zulip.com"),
|
|
|
|
]
|
2018-03-13 23:36:11 +01:00
|
|
|
# If a stream is not supplied in the webhook URL, the webhook
|
|
|
|
# will (in some cases) send the notification as a PM to the
|
|
|
|
# owner of the webhook bot, so bot_owner can't be None
|
2021-02-12 08:19:30 +01:00
|
|
|
create_users(
|
|
|
|
zulip_realm,
|
|
|
|
zulip_webhook_bots,
|
|
|
|
bot_type=UserProfile.INCOMING_WEBHOOK_BOT,
|
|
|
|
bot_owner=zoe,
|
|
|
|
)
|
2020-03-12 13:51:54 +01:00
|
|
|
aaron = get_user_by_delivery_email("AARON@zulip.com", zulip_realm)
|
2018-06-01 17:22:32 +02:00
|
|
|
|
2017-10-12 08:16:05 +02:00
|
|
|
zulip_outgoing_bots = [
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
("Outgoing Webhook", "outgoing-webhook@zulip.com"),
|
2017-10-12 08:16:05 +02:00
|
|
|
]
|
2021-02-12 08:19:30 +01:00
|
|
|
create_users(
|
|
|
|
zulip_realm,
|
|
|
|
zulip_outgoing_bots,
|
|
|
|
bot_type=UserProfile.OUTGOING_WEBHOOK_BOT,
|
|
|
|
bot_owner=aaron,
|
|
|
|
)
|
2018-06-01 17:22:32 +02:00
|
|
|
outgoing_webhook = get_user("outgoing-webhook@zulip.com", zulip_realm)
|
2021-02-12 08:19:30 +01:00
|
|
|
add_service(
|
|
|
|
"outgoing-webhook",
|
|
|
|
user_profile=outgoing_webhook,
|
|
|
|
interface=Service.GENERIC,
|
|
|
|
base_url="http://127.0.0.1:5002",
|
|
|
|
token=generate_api_key(),
|
|
|
|
)
|
2018-05-23 05:46:30 +02:00
|
|
|
|
2020-05-04 15:20:21 +02:00
|
|
|
# Add the realm internal bots to each realm.
|
2018-05-23 05:46:30 +02:00
|
|
|
create_if_missing_realm_internal_bots()
|
|
|
|
|
2024-03-29 13:00:12 +01:00
|
|
|
# Create streams.
|
2024-03-26 13:58:30 +01:00
|
|
|
zulip_discussion_channel_name = str(Realm.ZULIP_DISCUSSION_CHANNEL_NAME)
|
|
|
|
zulip_sandbox_channel_name = str(Realm.ZULIP_SANDBOX_CHANNEL_NAME)
|
|
|
|
|
2021-04-29 17:22:48 +02:00
|
|
|
stream_list = [
|
|
|
|
"Verona",
|
|
|
|
"Denmark",
|
|
|
|
"Scotland",
|
|
|
|
"Venice",
|
|
|
|
"Rome",
|
2024-03-29 13:00:12 +01:00
|
|
|
"core team",
|
2024-03-26 13:58:30 +01:00
|
|
|
zulip_discussion_channel_name,
|
|
|
|
zulip_sandbox_channel_name,
|
2021-04-29 17:22:48 +02:00
|
|
|
]
|
2024-07-12 02:30:17 +02:00
|
|
|
stream_dict: dict[str, dict[str, Any]] = {
|
2018-05-16 21:54:38 +02:00
|
|
|
"Denmark": {"description": "A Scandinavian country"},
|
2024-05-14 05:48:41 +02:00
|
|
|
"Scotland": {"description": "Located in the United Kingdom", "creator": iago},
|
|
|
|
"Venice": {"description": "A northeastern Italian city", "creator": polonius},
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
"Rome": {"description": "Yet another Italian city", "is_web_public": True},
|
2024-03-29 13:00:12 +01:00
|
|
|
"core team": {
|
|
|
|
"description": "A private channel for core team members",
|
|
|
|
"invite_only": True,
|
2024-05-14 05:48:41 +02:00
|
|
|
"creator": desdemona,
|
2024-03-29 13:00:12 +01:00
|
|
|
},
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
}
|
2016-12-08 00:02:21 +01:00
|
|
|
|
2016-12-15 17:07:45 +01:00
|
|
|
bulk_create_streams(zulip_realm, stream_dict)
|
2024-07-12 02:30:17 +02:00
|
|
|
recipient_streams: list[int] = [
|
2021-02-12 08:19:30 +01:00
|
|
|
Stream.objects.get(name=name, realm=zulip_realm).id for name in stream_list
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
]
|
2018-05-19 03:26:34 +02:00
|
|
|
|
2016-04-13 04:49:13 +02:00
|
|
|
# Create subscriptions to streams. The following
|
|
|
|
# algorithm will give each of the users a different but
|
|
|
|
# deterministic subset of the streams (given a fixed list
|
2018-05-19 03:26:34 +02:00
|
|
|
# of users). For the test suite, we have a fixed list of
|
|
|
|
# subscriptions to make sure test data is consistent
|
|
|
|
# across platforms.
|
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
subscriptions_list: list[tuple[UserProfile, Recipient]] = []
|
2021-07-26 16:46:53 +02:00
|
|
|
profiles: Sequence[UserProfile] = list(
|
2023-07-12 12:01:22 +02:00
|
|
|
UserProfile.objects.select_related("realm").filter(is_bot=False).order_by("email")
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2018-05-19 03:26:34 +02:00
|
|
|
|
|
|
|
if options["test_suite"]:
|
|
|
|
subscriptions_map = {
|
2021-02-12 08:20:45 +01:00
|
|
|
"AARON@zulip.com": ["Verona"],
|
|
|
|
"cordelia@zulip.com": ["Verona"],
|
2024-03-26 13:58:30 +01:00
|
|
|
"hamlet@zulip.com": [
|
|
|
|
"Verona",
|
|
|
|
"Denmark",
|
|
|
|
"core team",
|
|
|
|
zulip_discussion_channel_name,
|
|
|
|
zulip_sandbox_channel_name,
|
|
|
|
],
|
2021-04-29 17:22:48 +02:00
|
|
|
"iago@zulip.com": [
|
|
|
|
"Verona",
|
|
|
|
"Denmark",
|
|
|
|
"Scotland",
|
2024-03-29 13:00:12 +01:00
|
|
|
"core team",
|
2024-03-26 13:58:30 +01:00
|
|
|
zulip_discussion_channel_name,
|
|
|
|
zulip_sandbox_channel_name,
|
2021-04-29 17:22:48 +02:00
|
|
|
],
|
2021-02-12 08:20:45 +01:00
|
|
|
"othello@zulip.com": ["Verona", "Denmark", "Scotland"],
|
|
|
|
"prospero@zulip.com": ["Verona", "Denmark", "Scotland", "Venice"],
|
|
|
|
"ZOE@zulip.com": ["Verona", "Denmark", "Scotland", "Venice", "Rome"],
|
|
|
|
"polonius@zulip.com": ["Verona"],
|
2021-04-29 17:22:48 +02:00
|
|
|
"desdemona@zulip.com": [
|
|
|
|
"Verona",
|
|
|
|
"Denmark",
|
|
|
|
"Venice",
|
2024-03-29 13:00:12 +01:00
|
|
|
"core team",
|
2024-03-26 13:58:30 +01:00
|
|
|
zulip_discussion_channel_name,
|
|
|
|
zulip_sandbox_channel_name,
|
2021-04-29 17:22:48 +02:00
|
|
|
],
|
2020-12-22 15:46:00 +01:00
|
|
|
"shiva@zulip.com": ["Verona", "Denmark", "Scotland"],
|
2018-05-19 03:26:34 +02:00
|
|
|
}
|
|
|
|
|
2018-05-22 23:16:53 +02:00
|
|
|
for profile in profiles:
|
2020-03-12 13:51:54 +01:00
|
|
|
email = profile.delivery_email
|
|
|
|
if email not in subscriptions_map:
|
2021-02-12 08:20:45 +01:00
|
|
|
raise Exception(f"Subscriptions not listed for user {email}")
|
2018-05-19 03:26:34 +02:00
|
|
|
|
2020-03-12 13:51:54 +01:00
|
|
|
for stream_name in subscriptions_map[email]:
|
2021-04-29 17:22:48 +02:00
|
|
|
stream = Stream.objects.get(name=stream_name, realm=zulip_realm)
|
2018-05-19 03:26:34 +02:00
|
|
|
r = Recipient.objects.get(type=Recipient.STREAM, type_id=stream.id)
|
2018-05-22 23:16:53 +02:00
|
|
|
subscriptions_list.append((profile, r))
|
2018-05-19 03:26:34 +02:00
|
|
|
else:
|
2019-12-24 12:25:25 +01:00
|
|
|
num_streams = len(recipient_streams)
|
|
|
|
num_users = len(profiles)
|
2018-05-19 03:26:34 +02:00
|
|
|
for i, profile in enumerate(profiles):
|
|
|
|
# Subscribe to some streams.
|
2019-12-24 12:25:25 +01:00
|
|
|
fraction = float(i) / num_users
|
|
|
|
num_recips = int(num_streams * fraction) + 1
|
|
|
|
|
|
|
|
for type_id in recipient_streams[:num_recips]:
|
2018-05-19 03:26:34 +02:00
|
|
|
r = Recipient.objects.get(type=Recipient.STREAM, type_id=type_id)
|
2018-05-22 23:16:53 +02:00
|
|
|
subscriptions_list.append((profile, r))
|
2018-05-19 03:26:34 +02:00
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
subscriptions_to_add: list[Subscription] = []
|
2017-07-12 17:28:27 +02:00
|
|
|
event_time = timezone_now()
|
2024-07-12 02:30:17 +02:00
|
|
|
all_subscription_logs: list[RealmAuditLog] = []
|
2018-05-19 03:26:34 +02:00
|
|
|
|
2018-05-22 23:16:53 +02:00
|
|
|
i = 0
|
|
|
|
for profile, recipient in subscriptions_list:
|
|
|
|
i += 1
|
|
|
|
color = STREAM_ASSIGNMENT_COLORS[i % len(STREAM_ASSIGNMENT_COLORS)]
|
2021-02-14 00:03:40 +01:00
|
|
|
s = Subscription(
|
|
|
|
recipient=recipient,
|
|
|
|
user_profile=profile,
|
|
|
|
is_user_active=profile.is_active,
|
|
|
|
color=color,
|
|
|
|
)
|
2018-05-19 03:26:34 +02:00
|
|
|
|
|
|
|
subscriptions_to_add.append(s)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
log = RealmAuditLog(
|
|
|
|
realm=profile.realm,
|
|
|
|
modified_user=profile,
|
|
|
|
modified_stream_id=recipient.type_id,
|
|
|
|
event_last_message_id=0,
|
|
|
|
event_type=RealmAuditLog.SUBSCRIPTION_CREATED,
|
|
|
|
event_time=event_time,
|
|
|
|
)
|
2018-05-19 03:26:34 +02:00
|
|
|
all_subscription_logs.append(log)
|
2017-07-12 17:28:27 +02:00
|
|
|
|
2013-03-27 15:58:23 +01:00
|
|
|
Subscription.objects.bulk_create(subscriptions_to_add)
|
2017-07-12 17:28:27 +02:00
|
|
|
RealmAuditLog.objects.bulk_create(all_subscription_logs)
|
2018-03-12 01:51:07 +01:00
|
|
|
|
2018-03-19 20:17:52 +01:00
|
|
|
# Create custom profile field data
|
2021-02-12 08:19:30 +01:00
|
|
|
phone_number = try_add_realm_custom_profile_field(
|
2021-02-12 08:20:45 +01:00
|
|
|
zulip_realm, "Phone number", CustomProfileField.SHORT_TEXT, hint=""
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
biography = try_add_realm_custom_profile_field(
|
|
|
|
zulip_realm,
|
|
|
|
"Biography",
|
|
|
|
CustomProfileField.LONG_TEXT,
|
2021-02-12 08:20:45 +01:00
|
|
|
hint="What are you known for?",
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
favorite_food = try_add_realm_custom_profile_field(
|
|
|
|
zulip_realm,
|
|
|
|
"Favorite food",
|
|
|
|
CustomProfileField.SHORT_TEXT,
|
|
|
|
hint="Or drink, if you'd prefer",
|
|
|
|
)
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
field_data: ProfileFieldData = {
|
2022-06-30 19:19:21 +02:00
|
|
|
"0": {"text": "Vim", "order": "1"},
|
|
|
|
"1": {"text": "Emacs", "order": "2"},
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
}
|
2021-02-12 08:19:30 +01:00
|
|
|
favorite_editor = try_add_realm_custom_profile_field(
|
2021-03-20 11:39:22 +01:00
|
|
|
zulip_realm, "Favorite editor", CustomProfileField.SELECT, field_data=field_data
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
birthday = try_add_realm_custom_profile_field(
|
|
|
|
zulip_realm, "Birthday", CustomProfileField.DATE
|
|
|
|
)
|
|
|
|
favorite_website = try_add_realm_custom_profile_field(
|
|
|
|
zulip_realm,
|
|
|
|
"Favorite website",
|
|
|
|
CustomProfileField.URL,
|
|
|
|
hint="Or your personal blog's URL",
|
|
|
|
)
|
|
|
|
mentor = try_add_realm_custom_profile_field(
|
|
|
|
zulip_realm, "Mentor", CustomProfileField.USER
|
|
|
|
)
|
2019-08-24 13:52:25 +02:00
|
|
|
github_profile = try_add_realm_default_custom_profile_field(zulip_realm, "github")
|
2022-10-01 12:16:11 +02:00
|
|
|
pronouns = try_add_realm_custom_profile_field(
|
|
|
|
zulip_realm,
|
|
|
|
"Pronouns",
|
|
|
|
CustomProfileField.PRONOUNS,
|
|
|
|
hint="What pronouns should people use to refer to you?",
|
|
|
|
)
|
2018-03-19 20:17:52 +01:00
|
|
|
|
|
|
|
# Fill in values for Iago and Hamlet
|
2020-03-12 13:51:54 +01:00
|
|
|
hamlet = get_user_by_delivery_email("hamlet@zulip.com", zulip_realm)
|
2021-02-12 08:19:30 +01:00
|
|
|
do_update_user_custom_profile_data_if_changed(
|
|
|
|
iago,
|
|
|
|
[
|
|
|
|
{"id": phone_number.id, "value": "+1-234-567-8901"},
|
|
|
|
{"id": biography.id, "value": "Betrayer of Othello."},
|
|
|
|
{"id": favorite_food.id, "value": "Apples"},
|
2022-06-30 19:19:21 +02:00
|
|
|
{"id": favorite_editor.id, "value": "1"},
|
2021-02-12 08:19:30 +01:00
|
|
|
{"id": birthday.id, "value": "2000-01-01"},
|
|
|
|
{"id": favorite_website.id, "value": "https://zulip.readthedocs.io/en/latest/"},
|
|
|
|
{"id": mentor.id, "value": [hamlet.id]},
|
2021-02-12 08:20:45 +01:00
|
|
|
{"id": github_profile.id, "value": "zulip"},
|
2022-10-01 12:16:11 +02:00
|
|
|
{"id": pronouns.id, "value": "he/him"},
|
2021-02-12 08:19:30 +01:00
|
|
|
],
|
|
|
|
)
|
|
|
|
do_update_user_custom_profile_data_if_changed(
|
|
|
|
hamlet,
|
|
|
|
[
|
|
|
|
{"id": phone_number.id, "value": "+0-11-23-456-7890"},
|
|
|
|
{
|
|
|
|
"id": biography.id,
|
|
|
|
"value": "I am:\n* The prince of Denmark\n* Nephew to the usurping Claudius",
|
|
|
|
},
|
|
|
|
{"id": favorite_food.id, "value": "Dark chocolate"},
|
2022-06-30 19:19:21 +02:00
|
|
|
{"id": favorite_editor.id, "value": "0"},
|
2021-02-12 08:19:30 +01:00
|
|
|
{"id": birthday.id, "value": "1900-01-01"},
|
|
|
|
{"id": favorite_website.id, "value": "https://blog.zulig.org"},
|
|
|
|
{"id": mentor.id, "value": [iago.id]},
|
2021-02-12 08:20:45 +01:00
|
|
|
{"id": github_profile.id, "value": "zulipbot"},
|
2022-10-01 12:16:11 +02:00
|
|
|
{"id": pronouns.id, "value": "he/him"},
|
2021-02-12 08:19:30 +01:00
|
|
|
],
|
|
|
|
)
|
2023-04-28 13:35:00 +02:00
|
|
|
# We need to create at least one scheduled message for Iago for the api-test
|
|
|
|
# cURL example to delete an existing scheduled message.
|
|
|
|
check_schedule_message(
|
|
|
|
sender=iago,
|
|
|
|
client=get_client("populate_db"),
|
|
|
|
recipient_type_name="stream",
|
|
|
|
message_to=[Stream.objects.get(name="Denmark", realm=zulip_realm).id],
|
|
|
|
topic_name="test-api",
|
|
|
|
message_content="It's time to celebrate the anniversary of provisioning this development environment :tada:!",
|
|
|
|
deliver_at=timezone_now() + timedelta(days=365),
|
|
|
|
realm=zulip_realm,
|
|
|
|
)
|
2023-05-16 21:08:33 +02:00
|
|
|
check_schedule_message(
|
|
|
|
sender=iago,
|
|
|
|
client=get_client("populate_db"),
|
|
|
|
recipient_type_name="private",
|
|
|
|
message_to=[iago.id],
|
|
|
|
topic_name=None,
|
|
|
|
message_content="Note to self: It's been a while since you've provisioned this development environment.",
|
|
|
|
deliver_at=timezone_now() + timedelta(days=365),
|
|
|
|
realm=zulip_realm,
|
|
|
|
)
|
2023-08-11 01:10:21 +02:00
|
|
|
do_add_linkifier(
|
|
|
|
zulip_realm,
|
|
|
|
"#D(?P<id>[0-9]{2,8})",
|
|
|
|
"https://github.com/zulip/zulip-desktop/pull/{id}",
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
do_add_linkifier(
|
|
|
|
zulip_realm,
|
|
|
|
"zulip-mobile#(?P<id>[0-9]{2,8})",
|
|
|
|
"https://github.com/zulip/zulip-mobile/pull/{id}",
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
do_add_linkifier(
|
|
|
|
zulip_realm,
|
|
|
|
"zulip-(?P<repo>[a-zA-Z-_0-9]+)#(?P<id>[0-9]{2,8})",
|
|
|
|
"https://github.com/zulip/{repo}/pull/{id}",
|
|
|
|
acting_user=None,
|
|
|
|
)
|
2012-09-10 20:38:29 +02:00
|
|
|
else:
|
2017-01-04 05:30:48 +01:00
|
|
|
zulip_realm = get_realm("zulip")
|
2021-02-12 08:19:30 +01:00
|
|
|
recipient_streams = [
|
|
|
|
klass.type_id for klass in Recipient.objects.filter(type=Recipient.STREAM)
|
|
|
|
]
|
2012-09-10 20:38:29 +02:00
|
|
|
|
|
|
|
# Extract a list of all users
|
2024-07-12 02:30:17 +02:00
|
|
|
user_profiles: list[UserProfile] = list(
|
2022-09-22 21:30:52 +02:00
|
|
|
UserProfile.objects.filter(is_bot=False, realm=zulip_realm)
|
|
|
|
)
|
2016-12-09 20:18:52 +01:00
|
|
|
|
2023-10-22 11:03:03 +02:00
|
|
|
if options["test_suite"]:
|
|
|
|
# As we plan to change the default values for 'automatically_follow_topics_policy' and
|
|
|
|
# 'automatically_unmute_topics_in_muted_streams_policy' in the future, it will lead to
|
|
|
|
# skewing a lot of our tests, which now need to take into account extra events and database queries.
|
|
|
|
#
|
|
|
|
# We explicitly set the values for both settings to 'AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_NEVER'
|
|
|
|
# to make the tests independent of the default values.
|
|
|
|
#
|
|
|
|
# We have separate tests to verify events generated, database query counts,
|
|
|
|
# and other important details related to the above-mentioned settings.
|
2023-12-10 14:53:52 +01:00
|
|
|
#
|
|
|
|
# We set the value of 'automatically_follow_topics_where_mentioned' to 'False' so that it
|
|
|
|
# does not increase the number of events and db queries while running tests.
|
2023-10-22 11:03:03 +02:00
|
|
|
for user in user_profiles:
|
|
|
|
do_change_user_setting(
|
|
|
|
user,
|
|
|
|
"automatically_follow_topics_policy",
|
|
|
|
UserProfile.AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_NEVER,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
do_change_user_setting(
|
|
|
|
user,
|
|
|
|
"automatically_unmute_topics_in_muted_streams_policy",
|
|
|
|
UserProfile.AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_NEVER,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
2023-12-10 14:53:52 +01:00
|
|
|
do_change_user_setting(
|
|
|
|
user,
|
|
|
|
"automatically_follow_topics_where_mentioned",
|
|
|
|
False,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
2023-06-17 17:37:04 +02:00
|
|
|
|
2017-07-14 14:56:53 +02:00
|
|
|
# Create a test realm emoji.
|
2021-02-12 08:20:45 +01:00
|
|
|
IMAGE_FILE_PATH = static_path("images/test-images/checkbox.png")
|
|
|
|
with open(IMAGE_FILE_PATH, "rb") as fp:
|
2024-07-11 20:46:33 +02:00
|
|
|
check_add_realm_emoji(
|
|
|
|
zulip_realm, "green_tick", iago, File(fp, name="checkbox.png"), "image/png"
|
|
|
|
)
|
2017-07-14 14:56:53 +02:00
|
|
|
|
2016-12-09 20:18:52 +01:00
|
|
|
if not options["test_suite"]:
|
|
|
|
# Populate users with some bar data
|
|
|
|
for user in user_profiles:
|
2017-04-15 04:03:56 +02:00
|
|
|
date = timezone_now()
|
2021-02-12 08:19:30 +01:00
|
|
|
UserPresence.objects.get_or_create(
|
|
|
|
user_profile=user,
|
|
|
|
realm_id=user.realm_id,
|
2020-06-11 16:03:47 +02:00
|
|
|
defaults={"last_active_time": date, "last_connected_time": date},
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2016-12-09 20:18:52 +01:00
|
|
|
|
|
|
|
user_profiles_ids = [user_profile.id for user_profile in user_profiles]
|
2012-09-05 17:23:58 +02:00
|
|
|
|
2012-09-05 18:38:35 +02:00
|
|
|
# Create several initial huddles
|
2015-11-01 17:15:05 +01:00
|
|
|
for i in range(options["num_huddles"]):
|
2024-07-04 14:05:48 +02:00
|
|
|
get_or_create_direct_message_group(
|
|
|
|
random.sample(user_profiles_ids, random.randint(3, 4))
|
|
|
|
)
|
2012-09-05 18:38:35 +02:00
|
|
|
|
|
|
|
# Create several initial pairs for personals
|
2021-02-12 08:19:30 +01:00
|
|
|
personals_pairs = [
|
|
|
|
random.sample(user_profiles_ids, 2) for i in range(options["num_personals"])
|
|
|
|
]
|
2012-09-14 17:52:38 +02:00
|
|
|
|
2020-07-09 15:48:10 +02:00
|
|
|
create_alert_words(zulip_realm.id)
|
|
|
|
|
2017-08-05 20:10:11 +02:00
|
|
|
# Generate a new set of test data.
|
|
|
|
create_test_data()
|
|
|
|
|
2012-09-10 20:38:29 +02:00
|
|
|
if options["delete"]:
|
2015-08-19 05:43:13 +02:00
|
|
|
if options["test_suite"]:
|
|
|
|
# Create test users; the MIT ones are needed to test
|
|
|
|
# the Zephyr mirroring codepaths.
|
2022-05-14 13:07:14 +02:00
|
|
|
event_time = timezone_now()
|
2015-08-19 05:43:13 +02:00
|
|
|
testsuite_mit_users = [
|
|
|
|
("Fred Sipb (MIT)", "sipbtest@mit.edu"),
|
|
|
|
("Athena Consulting Exchange User (MIT)", "starnine@mit.edu"),
|
|
|
|
("Esp Classroom (MIT)", "espuser@mit.edu"),
|
2017-01-24 06:34:26 +01:00
|
|
|
]
|
2021-12-07 02:23:24 +01:00
|
|
|
create_users(
|
|
|
|
mit_realm, testsuite_mit_users, tos_version=settings.TERMS_OF_SERVICE_VERSION
|
|
|
|
)
|
2013-01-08 21:59:52 +01:00
|
|
|
|
2022-05-14 13:07:14 +02:00
|
|
|
mit_user = get_user_by_delivery_email("sipbtest@mit.edu", mit_realm)
|
2024-03-29 13:00:12 +01:00
|
|
|
bulk_create_streams(
|
|
|
|
mit_realm,
|
|
|
|
{
|
|
|
|
"core team": {
|
|
|
|
"description": "A private channel for core team members",
|
|
|
|
"invite_only": True,
|
|
|
|
}
|
|
|
|
},
|
2022-05-14 13:07:14 +02:00
|
|
|
)
|
2024-03-29 13:00:12 +01:00
|
|
|
core_team_stream = Stream.objects.get(name="core team", realm=mit_realm)
|
|
|
|
bulk_add_subscriptions(mit_realm, [core_team_stream], [mit_user], acting_user=None)
|
2022-05-14 13:07:14 +02:00
|
|
|
|
2017-11-28 02:42:11 +01:00
|
|
|
testsuite_lear_users = [
|
|
|
|
("King Lear", "king@lear.org"),
|
2021-04-11 16:26:54 +02:00
|
|
|
("Cordelia, Lear's daughter", "cordelia@zulip.com"),
|
2017-11-28 02:42:11 +01:00
|
|
|
]
|
2021-12-07 02:23:24 +01:00
|
|
|
create_users(
|
|
|
|
lear_realm, testsuite_lear_users, tos_version=settings.TERMS_OF_SERVICE_VERSION
|
|
|
|
)
|
2017-11-28 02:42:11 +01:00
|
|
|
|
2022-05-14 13:07:14 +02:00
|
|
|
lear_user = get_user_by_delivery_email("king@lear.org", lear_realm)
|
2024-03-29 13:00:12 +01:00
|
|
|
bulk_create_streams(
|
|
|
|
lear_realm,
|
|
|
|
{
|
|
|
|
"core team": {
|
|
|
|
"description": "A private channel for core team members",
|
|
|
|
"invite_only": True,
|
|
|
|
}
|
|
|
|
},
|
2022-05-14 13:07:14 +02:00
|
|
|
)
|
2024-03-29 13:00:12 +01:00
|
|
|
core_team_stream = Stream.objects.get(name="core team", realm=lear_realm)
|
2022-05-14 13:07:14 +02:00
|
|
|
bulk_add_subscriptions(
|
2024-03-29 13:00:12 +01:00
|
|
|
lear_realm, [core_team_stream], [lear_user], acting_user=None
|
2022-05-14 13:07:14 +02:00
|
|
|
)
|
|
|
|
|
2013-01-08 21:59:52 +01:00
|
|
|
if not options["test_suite"]:
|
|
|
|
# To keep the messages.json fixtures file for the test
|
|
|
|
# suite fast, don't add these users and subscriptions
|
|
|
|
# when running populate_db for the test suite
|
|
|
|
|
2021-04-02 04:53:08 +02:00
|
|
|
# to imitate emoji insertions in stream names
|
|
|
|
raw_emojis = ["😎", "😂", "🐱👤"]
|
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
zulip_stream_dict: dict[str, dict[str, Any]] = {
|
2018-05-16 21:54:38 +02:00
|
|
|
"devel": {"description": "For developing"},
|
2021-04-02 04:53:08 +02:00
|
|
|
# ビデオゲーム - VideoGames (japanese)
|
2024-01-29 00:32:21 +01:00
|
|
|
"ビデオゲーム": {
|
2024-05-14 05:48:41 +02:00
|
|
|
"description": f"Share your favorite video games! {raw_emojis[2]}",
|
|
|
|
"creator": shiva,
|
2024-01-29 00:32:21 +01:00
|
|
|
},
|
2021-02-12 08:19:30 +01:00
|
|
|
"announce": {
|
|
|
|
"description": "For announcements",
|
2021-02-12 08:20:45 +01:00
|
|
|
"stream_post_policy": Stream.STREAM_POST_POLICY_ADMINS,
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
2024-05-14 05:48:41 +02:00
|
|
|
"design": {"description": "For design", "creator": hamlet},
|
2018-05-16 21:54:38 +02:00
|
|
|
"support": {"description": "For support"},
|
|
|
|
"social": {"description": "For socializing"},
|
2019-06-01 22:11:46 +02:00
|
|
|
"test": {"description": "For testing `code`"},
|
2018-05-16 21:54:38 +02:00
|
|
|
"errors": {"description": "For errors"},
|
2023-02-23 01:40:19 +01:00
|
|
|
# 조리법 - Recipes (Korean), Пельмени - Dumplings (Russian)
|
2023-12-05 18:45:07 +01:00
|
|
|
"조리법 " + raw_emojis[0]: {
|
|
|
|
"description": "Everything cooking, from pasta to Пельмени"
|
|
|
|
},
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
}
|
2018-01-02 18:03:14 +01:00
|
|
|
|
2021-03-31 18:38:10 +02:00
|
|
|
extra_stream_names = [
|
|
|
|
"802.11a",
|
|
|
|
"Ad Hoc Network",
|
|
|
|
"Augmented Reality",
|
|
|
|
"Cycling",
|
|
|
|
"DPI",
|
|
|
|
"FAQ",
|
|
|
|
"FiFo",
|
|
|
|
"commits",
|
|
|
|
"Control panel",
|
|
|
|
"desktop",
|
|
|
|
"компьютеры",
|
|
|
|
"Data security",
|
|
|
|
"desktop",
|
|
|
|
"काम",
|
|
|
|
"discussions",
|
|
|
|
"Cloud storage",
|
|
|
|
"GCI",
|
|
|
|
"Vaporware",
|
|
|
|
"Recent Trends",
|
|
|
|
"issues",
|
|
|
|
"live",
|
|
|
|
"Health",
|
|
|
|
"mobile",
|
|
|
|
"空間",
|
|
|
|
"provision",
|
|
|
|
"hidrógeno",
|
|
|
|
"HR",
|
|
|
|
"アニメ",
|
|
|
|
]
|
2018-01-02 22:54:53 +01:00
|
|
|
|
2021-03-31 18:38:10 +02:00
|
|
|
# Add stream names and stream descriptions
|
2021-02-12 08:20:45 +01:00
|
|
|
for i in range(options["extra_streams"]):
|
2021-03-31 18:38:10 +02:00
|
|
|
extra_stream_name = random.choice(extra_stream_names) + " " + str(i)
|
2018-01-02 18:03:14 +01:00
|
|
|
|
2021-03-31 18:38:10 +02:00
|
|
|
# to imitate emoji insertions in stream names
|
|
|
|
if random.random() <= 0.15:
|
|
|
|
extra_stream_name += random.choice(raw_emojis)
|
2018-01-02 18:03:14 +01:00
|
|
|
|
|
|
|
zulip_stream_dict[extra_stream_name] = {
|
|
|
|
"description": "Auto-generated extra stream.",
|
|
|
|
}
|
|
|
|
|
2016-12-15 17:07:45 +01:00
|
|
|
bulk_create_streams(zulip_realm, zulip_stream_dict)
|
2024-02-07 12:13:02 +01:00
|
|
|
# Now that we've created the new_stream_announcements_stream, configure it properly.
|
2024-01-26 14:45:37 +01:00
|
|
|
# By default, 'New stream' & 'Zulip update' announcements are sent to the same stream.
|
|
|
|
announce_stream = get_stream("announce", zulip_realm)
|
|
|
|
zulip_realm.new_stream_announcements_stream = announce_stream
|
|
|
|
zulip_realm.zulip_update_announcements_stream = announce_stream
|
|
|
|
zulip_realm.save(
|
|
|
|
update_fields=[
|
|
|
|
"new_stream_announcements_stream",
|
|
|
|
"zulip_update_announcements_stream",
|
|
|
|
]
|
|
|
|
)
|
2013-01-08 21:59:52 +01:00
|
|
|
|
2013-07-08 23:24:26 +02:00
|
|
|
# Add a few default streams
|
2016-12-08 00:02:21 +01:00
|
|
|
for default_stream_name in ["design", "devel", "social", "support"]:
|
2021-02-12 08:19:30 +01:00
|
|
|
DefaultStream.objects.create(
|
|
|
|
realm=zulip_realm, stream=get_stream(default_stream_name, zulip_realm)
|
|
|
|
)
|
2013-07-08 23:24:26 +02:00
|
|
|
|
2013-01-08 21:59:52 +01:00
|
|
|
# Now subscribe everyone to these streams
|
2018-11-29 17:25:58 +01:00
|
|
|
subscribe_users_to_streams(zulip_realm, zulip_stream_dict)
|
2013-01-08 21:59:52 +01:00
|
|
|
|
2017-11-13 07:49:01 +01:00
|
|
|
create_user_groups()
|
2018-07-09 09:14:00 +02:00
|
|
|
|
|
|
|
if not options["test_suite"]:
|
|
|
|
# We populate the analytics database here for
|
|
|
|
# development purpose only
|
2021-02-12 08:20:45 +01:00
|
|
|
call_command("populate_analytics_db")
|
2020-05-04 15:20:21 +02:00
|
|
|
|
|
|
|
threads = options["threads"]
|
2024-07-12 02:30:17 +02:00
|
|
|
jobs: list[tuple[int, list[list[int]], dict[str, Any], int]] = []
|
2020-05-04 15:20:21 +02:00
|
|
|
for i in range(threads):
|
|
|
|
count = options["num_messages"] // threads
|
|
|
|
if i < options["num_messages"] % threads:
|
|
|
|
count += 1
|
2022-02-15 23:45:41 +01:00
|
|
|
jobs.append((count, personals_pairs, options, random.randint(0, 10**10)))
|
2020-05-04 15:20:21 +02:00
|
|
|
|
|
|
|
for job in jobs:
|
|
|
|
generate_and_send_messages(job)
|
|
|
|
|
|
|
|
if options["delete"]:
|
2021-02-12 08:20:45 +01:00
|
|
|
if not options["test_suite"]:
|
2020-05-04 15:20:21 +02:00
|
|
|
# These bots are not needed by the test suite
|
|
|
|
# Also, we don't want interacting with each other
|
|
|
|
# in dev setup.
|
|
|
|
internal_zulip_users_nosubs = [
|
|
|
|
("Zulip Commit Bot", "commit-bot@zulip.com"),
|
|
|
|
("Zulip Trac Bot", "trac-bot@zulip.com"),
|
|
|
|
("Zulip Nagios Bot", "nagios-bot@zulip.com"),
|
|
|
|
]
|
2021-02-12 08:19:30 +01:00
|
|
|
create_users(
|
2022-02-16 17:37:00 +01:00
|
|
|
zulip_realm,
|
|
|
|
internal_zulip_users_nosubs,
|
|
|
|
bot_type=UserProfile.DEFAULT_BOT,
|
|
|
|
bot_owner=desdemona,
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-05-04 15:20:21 +02:00
|
|
|
|
2020-07-09 16:43:29 +02:00
|
|
|
mark_all_messages_as_read()
|
2012-09-10 20:38:29 +02:00
|
|
|
self.stdout.write("Successfully populated test database.\n")
|
2012-10-02 22:58:13 +02:00
|
|
|
|
2021-07-26 18:59:34 +02:00
|
|
|
push_notifications_logger.disabled = False
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-07-09 16:43:29 +02:00
|
|
|
def mark_all_messages_as_read() -> None:
    """
    Flip on the `read` flag for every human user's messages.

    We want to keep the other UserMessage flags intact after we create
    messages; the is_private flag, for example, would be bad to
    overwrite.  So we're careful to only toggle the read bit.

    Bots are excluded, since bots, by default, never mark messages as
    read.
    """
    human_rows = UserMessage.objects.filter(user_profile__is_bot=False)
    human_rows.update(flags=F("flags").bitor(UserMessage.flags.read))
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
# Cache of Recipient rows keyed by Recipient id; consulted by
# get_recipient_by_id below to avoid repeated database lookups.
recipient_hash: dict[int, Recipient] = {}
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
|
2017-10-27 12:57:54 +02:00
|
|
|
def get_recipient_by_id(rid: int) -> Recipient:
    """Fetch a Recipient by id, memoizing results in recipient_hash.

    The original code checked the cache but never filled it, so every
    cache miss hit the database again; now a miss stores the fetched
    row, so repeated lookups of the same recipient (common in the
    message-generation loop) are served from memory.
    """
    if rid not in recipient_hash:
        recipient_hash[rid] = Recipient.objects.get(id=rid)
    return recipient_hash[rid]
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2012-10-03 21:07:40 +02:00
|
|
|
# Create some test messages, including:
|
2012-10-10 23:17:43 +02:00
|
|
|
# - multiple streams
|
2012-10-11 00:01:39 +02:00
|
|
|
# - multiple subjects per stream
|
2012-09-14 18:31:11 +02:00
|
|
|
# - multiple huddles
|
2023-01-02 20:50:23 +01:00
|
|
|
# - multiple personal conversations
|
2012-10-11 00:01:39 +02:00
|
|
|
# - multiple messages per subject
|
2012-09-14 18:31:11 +02:00
|
|
|
# - both single and multi-line content
|
2021-02-12 08:19:30 +01:00
|
|
|
def generate_and_send_messages(
    data: tuple[int, Sequence[Sequence[int]], Mapping[str, Any], int],
) -> int:
    """Generate and send `tot_messages` fake messages in the zulip realm.

    `data` is a tuple of (total message count, pairs of user ids for
    1:1 personals, the command-line options mapping, and an RNG seed).
    Messages are distributed between streams, personals, and direct
    message groups ("huddles") according to the percent_huddles /
    percent_personals options, with a `stickiness` chance of reusing
    the previous message's recipient to create realistic threads.
    Returns the number of messages requested (tot_messages).
    """
    realm = get_realm("zulip")
    (tot_messages, personals_pairs, options, random_seed) = data
    # Seed per job so parallel invocations generate distinct content
    # while each job remains reproducible.
    random.seed(random_seed)

    # Message bodies are drawn (cyclically, after a shuffle) from a
    # fixture file of canned dialog.
    with open(
        os.path.join(get_or_create_dev_uuid_var_path("test-backend"), "test_messages.json"), "rb"
    ) as infile:
        dialog = orjson.loads(infile.read())
    random.shuffle(dialog)
    texts = itertools.cycle(dialog)

    # We need to filter out streams from the analytics realm as we don't want to generate
    # messages to its streams - and they might also have no subscribers, which would break
    # our message generation mechanism below.
    stream_ids = Stream.objects.filter(realm=realm).values_list("id", flat=True)
    recipient_streams: list[int] = [
        recipient.id
        for recipient in Recipient.objects.filter(type=Recipient.STREAM, type_id__in=stream_ids)
    ]
    recipient_huddles: list[int] = [
        h.id for h in Recipient.objects.filter(type=Recipient.DIRECT_MESSAGE_GROUP)
    ]

    # Precompute each direct message group's membership so we can pick
    # a valid sender without a per-message query.
    huddle_members: dict[int, list[int]] = {}
    for h in recipient_huddles:
        huddle_members[h] = [s.user_profile.id for s in Subscription.objects.filter(recipient_id=h)]

    # Generate different topics for each stream
    possible_topic_names = {}
    for stream_id in recipient_streams:
        # We want the test suite to have a predictable database state,
        # since some tests depend on it; but for actual development,
        # we want some streams to have more topics than others for
        # realistic variety.
        if not options["test_suite"]:
            num_topics = random.randint(1, options["max_topics"])
        else:
            num_topics = options["max_topics"]

        possible_topic_names[stream_id] = generate_topics(num_topics)

    message_batch_size = options["batch_size"]
    num_messages = 0
    random_max = 1000000
    # History of (message_type, recipient_id, saved_data) per message
    # index, consulted by the "stickiness" branch below to continue a
    # prior conversation.
    recipients: dict[int, tuple[int, int, dict[str, Any]]] = {}
    messages: list[Message] = []
    while num_messages < tot_messages:
        saved_data: dict[str, Any] = {}
        message = Message(realm=realm)
        message.sending_client = get_client("populate_db")

        message.content = next(texts)

        randkey = random.randint(1, random_max)
        if (
            num_messages > 0
            and random.randint(1, random_max) * 100.0 / random_max < options["stickiness"]
        ):
            # Use an old recipient
            message_type, recipient_id, saved_data = recipients[num_messages - 1]
            if message_type == Recipient.PERSONAL:
                personals_pair = saved_data["personals_pair"]
                random.shuffle(personals_pair)
            elif message_type == Recipient.STREAM:
                # Keep the same topic as the previous message too.
                message.subject = saved_data["subject"]
                message.recipient = get_recipient_by_id(recipient_id)
            elif message_type == Recipient.DIRECT_MESSAGE_GROUP:
                message.recipient = get_recipient_by_id(recipient_id)
        elif randkey <= random_max * options["percent_huddles"] / 100.0:
            message_type = Recipient.DIRECT_MESSAGE_GROUP
            message.recipient = get_recipient_by_id(random.choice(recipient_huddles))
        elif (
            randkey
            <= random_max * (options["percent_huddles"] + options["percent_personals"]) / 100.0
        ):
            message_type = Recipient.PERSONAL
            personals_pair = random.choice(personals_pairs)
            random.shuffle(personals_pair)
        elif randkey <= random_max * 1.0:
            # Remaining probability mass: a stream message.
            message_type = Recipient.STREAM
            message.recipient = get_recipient_by_id(random.choice(recipient_streams))

        # Now that the conversation is chosen, pick the sender (and for
        # personals, resolve the Recipient row from the shuffled pair).
        if message_type == Recipient.DIRECT_MESSAGE_GROUP:
            sender_id = random.choice(huddle_members[message.recipient.id])
            message.sender = get_user_profile_by_id(sender_id)
        elif message_type == Recipient.PERSONAL:
            message.recipient = Recipient.objects.get(
                type=Recipient.PERSONAL, type_id=personals_pair[0]
            )
            message.sender = get_user_profile_by_id(personals_pair[1])
            saved_data["personals_pair"] = personals_pair
        elif message_type == Recipient.STREAM:
            # Pick a random subscriber to the stream
            message.sender = random.choice(
                list(Subscription.objects.filter(recipient=message.recipient))
            ).user_profile
            message.subject = random.choice(possible_topic_names[message.recipient.id])
            saved_data["subject"] = message.subject

        message.date_sent = choose_date_sent(
            num_messages, tot_messages, options["oldest_message_days"], options["threads"]
        )
        messages.append(message)

        recipients[num_messages] = (message_type, message.recipient.id, saved_data)
        num_messages += 1

        if (num_messages % message_batch_size) == 0:
            # Send the batch and empty the list:
            send_messages(messages)
            messages = []

    if len(messages) > 0:
        # If there are unsent messages after exiting the loop, send them:
        send_messages(messages)

    return tot_messages
|
2017-01-27 00:06:55 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
def send_messages(messages: list[Message]) -> None:
    """Send a batch of prebuilt Message objects, bypassing RabbitMQ.

    We disable USING_RABBITMQ here, so that deferred work is
    executed in do_send_messages, rather than being
    queued.  This is important, because otherwise, if run-dev
    wasn't running when populate_db was run, a developer can end
    up with queued events that reference objects from a previous
    life of the database, which naturally throws exceptions.
    """
    settings.USING_RABBITMQ = False
    try:
        do_send_messages([build_message_send_dict(message=message) for message in messages])
        bulk_create_reactions(messages)
    finally:
        # Restore the flag even if sending raises, so one failed batch
        # doesn't leave the global setting flipped off for the rest of
        # the process.
        settings.USING_RABBITMQ = True
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_message_to_users(message_ids: list[int]) -> dict[int, list[int]]:
    """Map each message id to the user ids holding a UserMessage row for it.

    The returned mapping is a defaultdict, so callers may index message
    ids that have no rows and get an empty list.
    """
    result: dict[int, list[int]] = defaultdict(list)

    um_rows = UserMessage.objects.filter(message_id__in=message_ids).values(
        "message_id", "user_profile_id"
    )
    for um in um_rows:
        result[um["message_id"]].append(um["user_profile_id"])

    return result
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
def bulk_create_reactions(all_messages: list[Message]) -> None:
    """Attach random Unicode emoji reactions to ~20% of the given messages.

    For each sampled message, between 1 and 7 of the users who received
    it react, and each reactor adds between 1 and 3 emojis.
    """
    pending: list[Reaction] = []

    sample_size = int(0.2 * len(all_messages))
    sampled = random.sample(all_messages, sample_size)
    sampled_ids = [msg.id for msg in sampled]

    readers_by_message = get_message_to_users(sampled_ids)

    for msg_id in sampled_ids:
        reader_ids = readers_by_message[msg_id]
        if not reader_ids:
            continue

        # Now let between 1 and 7 users react.
        #
        # Ideally, we'd make exactly 1 reaction more common than
        # this algorithm generates.
        reactor_cap = min(7, len(reader_ids))
        reactor_count = random.randrange(1, reactor_cap + 1)
        reactor_ids = random.sample(reader_ids, reactor_count)

        for reactor_id in reactor_ids:
            # each user does between 1 and 3 emojis
            emoji_count = random.choice([1, 2, 3])
            chosen_emojis = random.sample(DEFAULT_EMOJIS, emoji_count)

            pending.extend(
                Reaction(
                    user_profile_id=reactor_id,
                    message_id=msg_id,
                    emoji_name=emoji_name,
                    emoji_code=emoji_code,
                    reaction_type=Reaction.UNICODE_EMOJI,
                )
                for emoji_name, emoji_code in chosen_emojis
            )

    Reaction.objects.bulk_create(pending)
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
|
2022-04-05 00:16:17 +02:00
|
|
|
def choose_date_sent(
    num_messages: int, tot_messages: int, oldest_message_days: int, threads: int
) -> datetime:
    """Pick a spoofed send timestamp for the num_messages-th of tot_messages.

    We want to ensure that:
    (1) some messages are sent in the last 4 hours,
    (2) there are some >24hr gaps between adjacent messages, and
    (3) a decent bulk of messages in the last day so you see adjacent
        messages with the same date.

    So we distribute 80% of messages starting from oldest_message_days
    days ago, over a period of the first min(oldest_message_days-2, 1)
    of those days.  Then, distributes remaining messages over the past
    24 hours.
    """
    # Spoofing time not supported with threading
    if threads != 1:
        return timezone_now()

    first_chunk_count = int(tot_messages * 0.8)
    second_chunk_count = tot_messages - first_chunk_count

    if num_messages < first_chunk_count:
        base_date = timezone_now() - timedelta(days=oldest_message_days)
        # NOTE(review): min() caps the first chunk's span at one day even
        # when oldest_message_days is large; confirm max() wasn't intended.
        first_chunk_days = min(oldest_message_days - 2, 1)
        seconds_per_message = first_chunk_days * 24 * 60 * 60 / first_chunk_count
        slot = num_messages
    else:
        # We're in the last 20% of messages, so distribute them over the last 24 hours:
        base_date = timezone_now() - timedelta(days=1)
        seconds_per_message = 24 * 60 * 60 / second_chunk_count
        slot = num_messages - first_chunk_count

    # Jitter the message uniformly within its slot of the chunk's window.
    offset_seconds = random.uniform(
        seconds_per_message * slot, seconds_per_message * (slot + 1)
    )
    return base_date + timedelta(seconds=offset_seconds)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-27 12:08:59 +01:00
|
|
|
def create_user_groups() -> None:
    """Create the "hamletcharacters" user group in the zulip realm."""
    realm = get_realm("zulip")
    hamlet_characters = [
        get_user_by_delivery_email(email, realm)
        for email in ("cordelia@zulip.com", "hamlet@zulip.com")
    ]
    create_user_group_in_database(
        "hamletcharacters",
        hamlet_characters,
        realm,
        description="Characters of Hamlet",
        acting_user=None,
    )
|