2018-05-11 01:40:23 +02:00
|
|
|
from typing import Any, Dict, Iterable, List, Mapping, Optional, Set, Tuple
|
2013-04-23 18:51:17 +02:00
|
|
|
|
2013-07-29 23:03:31 +02:00
|
|
|
from zerver.lib.initial_password import initial_password
|
|
|
|
from zerver.models import Realm, Stream, UserProfile, Huddle, \
|
2017-02-15 04:35:10 +01:00
|
|
|
Subscription, Recipient, Client, RealmAuditLog, get_huddle_hash
|
2013-07-29 23:03:31 +02:00
|
|
|
from zerver.lib.create_user import create_user_profile
|
2013-01-10 21:50:09 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def bulk_create_users(realm: Realm,
                      users_raw: Set[Tuple[str, str, str, bool]],
                      bot_type: Optional[int]=None,
                      bot_owner: Optional[UserProfile]=None,
                      tos_version: Optional[str]=None,
                      timezone: str="") -> None:
    """
    Bulk-create a UserProfile (plus its RealmAuditLog entry, personal
    Recipient, and self-Subscription) for every
    (email, full_name, short_name, active) tuple in `users_raw` whose
    email does not already exist in `realm`.

    Has some code based off of UserManage.create_user, but doesn't .save()
    each object individually -- everything goes through bulk_create.
    """
    existing_users = frozenset(UserProfile.objects.filter(
        realm=realm).values_list('email', flat=True))
    # sorted() accepts any iterable, so no intermediate list is needed.
    # Sorting gives a deterministic creation order across runs.
    users = sorted(user_raw for user_raw in users_raw
                   if user_raw[0] not in existing_users)

    # Now create user_profiles
    profiles_to_create = [
        create_user_profile(realm, email,
                            initial_password(email), active, bot_type,
                            full_name, short_name, bot_owner, False, tos_version,
                            timezone, tutorial_status=UserProfile.TUTORIAL_FINISHED,
                            enter_sends=True)
        for (email, full_name, short_name, active) in users]
    UserProfile.objects.bulk_create(profiles_to_create)

    # One 'user_created' audit-log row per new profile, stamped with the
    # profile's own date_joined as the event time.
    RealmAuditLog.objects.bulk_create(
        [RealmAuditLog(realm=realm, modified_user=profile_,
                       event_type='user_created', event_time=profile_.date_joined)
         for profile_ in profiles_to_create])

    # Re-fetch the profiles so we have database-assigned ids (bulk_create
    # may not set .id on the in-memory objects on every backend).
    profiles_by_email = {}  # type: Dict[str, UserProfile]
    profiles_by_id = {}  # type: Dict[int, UserProfile]
    for profile in UserProfile.objects.select_related().filter(realm=realm):
        profiles_by_email[profile.email] = profile
        profiles_by_id[profile.id] = profile

    # A PERSONAL recipient for each newly created user.
    recipients_to_create = [
        Recipient(type_id=profiles_by_email[email].id,
                  type=Recipient.PERSONAL)
        for (email, full_name, short_name, active) in users]
    Recipient.objects.bulk_create(recipients_to_create)

    recipients_by_email = {
        profiles_by_id[recipient.type_id].email: recipient
        for recipient in recipients_to_create}

    # Subscribe each new user to their own personal recipient.
    subscriptions_to_create = [
        Subscription(user_profile_id=profiles_by_email[email].id,
                     recipient=recipients_by_email[email])
        for (email, full_name, short_name, active) in users]
    Subscription.objects.bulk_create(subscriptions_to_create)
|
2013-01-10 21:50:09 +01:00
|
|
|
|
2018-05-10 01:27:35 +02:00
|
|
|
# This is only used in populate_db, so doesn't really need tests
|
2017-11-05 11:15:10 +01:00
|
|
|
def bulk_create_streams(realm: Realm,
                        stream_dict: Dict[str, Dict[str, Any]]) -> None:  # nocoverage
    """
    Create every stream named in `stream_dict` (a mapping of stream name
    to an options dict) that doesn't already exist in `realm`, along
    with a STREAM Recipient for each newly created stream.
    """
    # frozenset() takes a generator directly; no intermediate list needed.
    # Compare names case-insensitively, matching the lower() checks below.
    existing_streams = frozenset(name.lower() for name in
                                 Stream.objects.filter(realm=realm)
                                 .values_list('name', flat=True))
    streams_to_create = []  # type: List[Stream]
    for name, options in stream_dict.items():
        # Fill in the history_public_to_subscribers default for every
        # entry, even already-existing streams; note this intentionally
        # mutates the caller's options dicts.
        if 'history_public_to_subscribers' not in options:
            options['history_public_to_subscribers'] = (
                not options.get("invite_only", False) and not realm.is_zephyr_mirror_realm)
        if name.lower() not in existing_streams:
            streams_to_create.append(
                Stream(
                    realm=realm,
                    name=name,
                    description=options["description"],
                    invite_only=options.get("invite_only", False),
                    is_announcement_only=options.get("is_announcement_only", False),
                    history_public_to_subscribers=options["history_public_to_subscribers"],
                    is_web_public=options.get("is_web_public", False),
                    is_in_zephyr_realm=realm.is_zephyr_mirror_realm,
                )
            )
    # Sort streams by name before creating them so that we can have a
    # reliable ordering of `stream_id` across different python versions.
    # This is required for test fixtures which contain `stream_id`; hash
    # randomization (on by default since python 3.3) otherwise makes the
    # iteration order of `stream_dict` unpredictable.
    streams_to_create.sort(key=lambda x: x.name)
    Stream.objects.bulk_create(streams_to_create)

    # Re-query the database so we have the assigned stream ids, then
    # create a STREAM Recipient for each stream we just added.
    recipients_to_create = [
        Recipient(type_id=stream['id'], type=Recipient.STREAM)
        for stream in Stream.objects.filter(realm=realm).values('id', 'name')
        if stream['name'].lower() not in existing_streams]
    Recipient.objects.bulk_create(recipients_to_create)
|