2022-04-14 23:53:15 +02:00
|
|
|
from collections import defaultdict
|
2023-11-19 19:45:19 +01:00
|
|
|
from datetime import timedelta
|
2022-04-14 23:53:15 +02:00
|
|
|
from typing import Any, Dict, Iterable, List, Optional, Sequence, Set
|
|
|
|
|
|
|
|
from django.conf import settings
|
|
|
|
from django.db import transaction
|
|
|
|
from django.utils.timezone import now as timezone_now
|
|
|
|
from django.utils.translation import gettext as _
|
|
|
|
from django.utils.translation import override as override_language
|
|
|
|
|
|
|
|
from analytics.lib.counts import COUNT_STATS, do_increment_logging_stat
|
|
|
|
from confirmation import settings as confirmation_settings
|
|
|
|
from zerver.actions.invites import notify_invites_changed
|
|
|
|
from zerver.actions.message_send import internal_send_private_message, internal_send_stream_message
|
|
|
|
from zerver.actions.streams import bulk_add_subscriptions, send_peer_subscriber_events
|
|
|
|
from zerver.actions.user_groups import do_send_user_group_members_update_event
|
|
|
|
from zerver.actions.users import change_user_is_active, get_service_dicts_for_bot
|
|
|
|
from zerver.lib.avatar import avatar_url
|
|
|
|
from zerver.lib.create_user import create_user
|
2023-07-09 13:30:19 +02:00
|
|
|
from zerver.lib.default_streams import get_slim_realm_default_streams
|
2023-06-30 13:27:25 +02:00
|
|
|
from zerver.lib.email_notifications import enqueue_welcome_emails, send_account_registered_email
|
2023-09-25 21:58:33 +02:00
|
|
|
from zerver.lib.exceptions import JsonableError
|
2022-04-14 23:53:15 +02:00
|
|
|
from zerver.lib.mention import silent_mention_syntax_for_user
|
2023-12-09 15:00:30 +01:00
|
|
|
from zerver.lib.remote_server import maybe_enqueue_audit_log_upload
|
2022-04-14 23:53:15 +02:00
|
|
|
from zerver.lib.send_email import clear_scheduled_invitation_emails
|
|
|
|
from zerver.lib.stream_subscription import bulk_get_subscriber_peer_info
|
|
|
|
from zerver.lib.user_counts import realm_user_count, realm_user_count_by_role
|
|
|
|
from zerver.lib.user_groups import get_system_user_group_for_user
|
2021-12-11 08:17:57 +01:00
|
|
|
from zerver.lib.users import (
|
|
|
|
can_access_delivery_email,
|
|
|
|
format_user_row,
|
|
|
|
get_api_key,
|
2023-11-03 04:39:40 +01:00
|
|
|
get_data_for_inaccessible_user,
|
|
|
|
user_access_restricted_in_realm,
|
2021-12-11 08:17:57 +01:00
|
|
|
user_profile_to_user_row,
|
|
|
|
)
|
2022-04-14 23:53:15 +02:00
|
|
|
from zerver.models import (
|
|
|
|
DefaultStreamGroup,
|
|
|
|
Message,
|
2023-03-10 11:42:18 +01:00
|
|
|
PreregistrationRealm,
|
2022-04-14 23:53:15 +02:00
|
|
|
PreregistrationUser,
|
|
|
|
Realm,
|
|
|
|
RealmAuditLog,
|
|
|
|
Recipient,
|
|
|
|
Stream,
|
|
|
|
Subscription,
|
|
|
|
UserGroup,
|
|
|
|
UserGroupMembership,
|
|
|
|
UserMessage,
|
|
|
|
UserProfile,
|
|
|
|
)
|
2023-12-15 01:55:59 +01:00
|
|
|
from zerver.models.groups import SystemGroups
|
2023-12-15 01:16:00 +01:00
|
|
|
from zerver.models.users import active_user_ids, bot_owner_user_ids, get_system_bot
|
django_api: Extract send_event_on_commit helper.
django-stubs 4.2.1 gives transaction.on_commit a more accurate type
annotation, but this exposed that mypy can’t handle the lambda default
parameters that we use to recapture loop variables such as
for stream_id in public_stream_ids:
peer_user_ids = …
event = …
transaction.on_commit(
lambda event=event, peer_user_ids=peer_user_ids: send_event(
realm, event, peer_user_ids
)
)
https://github.com/python/mypy/issues/15459
A workaround that mypy accepts is
transaction.on_commit(
(
lambda event, peer_user_ids: lambda: send_event(
realm, event, peer_user_ids
)
)(event, peer_user_ids)
)
But that’s kind of ugly and potentially error-prone, so let’s make a
helper function for this very common pattern.
send_event_on_commit(realm, event, peer_user_ids)
Signed-off-by: Anders Kaseorg <anders@zulip.com>
2023-06-17 20:53:07 +02:00
|
|
|
from zerver.tornado.django_api import send_event_on_commit
|
2022-04-14 23:53:15 +02:00
|
|
|
|
|
|
|
if settings.BILLING_ENABLED:
|
2023-12-05 21:09:28 +01:00
|
|
|
from corporate.lib.stripe import RealmBillingSession
|
2022-04-14 23:53:15 +02:00
|
|
|
|
|
|
|
|
2023-07-10 12:40:31 +02:00
|
|
|
# Upper bound on how many recent messages we backfill into a brand-new
# user's feed (see add_new_user_history).
MAX_NUM_ONBOARDING_MESSAGES = 1000

# Of the backfilled messages, at most this many (the newest ones) are
# left unread; the rest are marked as read.
MAX_NUM_ONBOARDING_UNREAD_MESSAGES = 20

# We don't want to mark years-old messages as unread, since that might
# feel like Zulip is buggy, but in low-traffic or bursty-traffic
# organizations, it's reasonable for the most recent 20 messages to be
# several weeks old and still be a good place to start.
ONBOARDING_RECENT_TIMEDELTA = timedelta(weeks=12)

# Default flags for UserMessage rows created after the fact for messages
# the user did not originally receive (see create_historical_user_messages).
DEFAULT_HISTORICAL_FLAGS = UserMessage.flags.historical | UserMessage.flags.read
|
2022-04-14 23:53:15 +02:00
|
|
|
|
2022-11-17 02:43:58 +01:00
|
|
|
|
2022-11-17 00:50:22 +01:00
|
|
|
def create_historical_user_messages(
    *, user_id: int, message_ids: Iterable[int], flags: int = DEFAULT_HISTORICAL_FLAGS
) -> None:
    """Bulk-create "historical" UserMessage rows for the given user.

    Users can see and interact with messages sent to streams with public
    history even when they have no UserMessage row, because they were not
    subscribed at the time the message was sent.  To let such users add
    emoji reactions or mutate message flags, we create UserMessage rows
    for those messages on demand; the special "historical" flag records
    that the user did not receive the message when it was sent.
    """
    historical_rows = [
        UserMessage(user_profile_id=user_id, message_id=message_id, flags=flags)
        for message_id in message_ids
    ]
    UserMessage.objects.bulk_create(historical_rows)
|
2022-04-14 23:53:15 +02:00
|
|
|
|
|
|
|
|
|
|
|
def send_message_to_signup_notification_stream(
    sender: UserProfile, realm: Realm, message: str
) -> None:
    """Post `message` to the realm's signup notifications stream, if any.

    Does nothing when the realm has no signup notifications stream
    configured.  The topic name is localized into the realm's default
    language.
    """
    notifications_stream = realm.get_signup_notifications_stream()
    if notifications_stream is None:
        return

    with override_language(realm.default_language):
        topic_name = _("signups")

    internal_send_stream_message(sender, notifications_stream, topic_name, message)
|
2022-04-14 23:53:15 +02:00
|
|
|
|
|
|
|
|
|
|
|
def notify_new_user(user_profile: UserProfile) -> None:
    """Announce a signup on the realm's signup notifications stream.

    The very first user of a realm is not announced (there is nobody to
    notify yet).  When billing is enabled, a licenses-low warning is
    appended to the announcement if applicable.
    """
    realm = user_profile.realm
    user_count = realm_user_count(realm)
    sender = get_system_bot(settings.NOTIFICATION_BOT, user_profile.realm_id)

    if user_count == 1:
        # This is the realm's first user; skip the announcement.
        return

    with override_language(realm.default_language):
        message = _("{user} just signed up for Zulip. (total: {user_count})").format(
            user=silent_mention_syntax_for_user(user_profile), user_count=user_count
        )

    if settings.BILLING_ENABLED:
        from corporate.lib.registration import generate_licenses_low_warning_message_if_required

        licenses_low_warning_message = generate_licenses_low_warning_message_if_required(realm)
        if licenses_low_warning_message is not None:
            message += "\n" + licenses_low_warning_message

    send_message_to_signup_notification_stream(sender, realm, message)
|
|
|
|
|
|
|
|
|
2023-07-09 11:42:07 +02:00
|
|
|
def set_up_streams_for_new_human_user(
    *,
    user_profile: UserProfile,
    prereg_user: Optional[PreregistrationUser] = None,
    default_stream_groups: Sequence[DefaultStreamGroup] = [],
) -> None:
    """Subscribe a newly created human user to their initial streams.

    Streams come from, in order: the PreregistrationUser's invited
    streams; the realm's default streams (only when the user signed up
    without an invitation and no streams were listed); and any requested
    default stream groups.  Finishes by backfilling recent message
    history via add_new_user_history.
    """
    realm = user_profile.realm

    streams: List[Stream] = []
    acting_user: Optional[UserProfile] = None
    if prereg_user is not None:
        # A PreregistrationUser must not be reused for another UserProfile.
        assert prereg_user.created_user is None, "PregistrationUser should not be reused"
        streams = list(prereg_user.streams.all())
        acting_user = prereg_user.referred_by

    user_was_invited = prereg_user is not None and (
        prereg_user.referred_by is not None or prereg_user.multiuse_invite is not None
    )

    # If the Preregistration object didn't explicitly list some streams (it
    # happens when the user directly signs up without any invitation), we add
    # the default streams for the realm.  Note that we are fine with "slim"
    # Stream objects for calling bulk_add_subscriptions and
    # add_new_user_history, which we verify in StreamSetupTest tests that
    # check query counts.
    if not streams and not user_was_invited:
        streams = get_slim_realm_default_streams(realm.id)

    for stream_group in default_stream_groups:
        for stream in stream_group.streams.all():
            if stream not in streams:
                streams.append(stream)

    bulk_add_subscriptions(
        realm,
        streams,
        [user_profile],
        from_user_creation=True,
        acting_user=acting_user,
    )

    add_new_user_history(user_profile, streams)
|
|
|
|
|
|
|
|
|
2022-04-14 23:53:15 +02:00
|
|
|
def add_new_user_history(user_profile: UserProfile, streams: Iterable[Stream]) -> None:
    """Backfill recent stream messages into a new user's feed.

    Give the user some messages in their feed, so that they can learn how
    to use the home view in a realistic way after finishing the tutorial.
    Only the very most recent messages are left unread.
    """
    # Only public streams among those passed to us are eligible.
    public_recipient_ids = [stream.recipient_id for stream in streams if not stream.invite_only]

    # Collect up to MAX_NUM_ONBOARDING_MESSAGES recent message ids sent
    # to those recipients.
    cutoff_date = timezone_now() - ONBOARDING_RECENT_TIMEDELTA
    recent_message_ids = set(
        Message.objects.filter(
            # Uses index: zerver_message_realm_recipient_id
            realm_id=user_profile.realm_id,
            recipient_id__in=public_recipient_ids,
            date_sent__gt=cutoff_date,
        )
        .order_by("-id")
        .values_list("id", flat=True)[0:MAX_NUM_ONBOARDING_MESSAGES]
    )

    if not recent_message_ids:
        return

    # Handle the race condition where a message arrives between
    # bulk_add_subscriptions above and the Message query just above.
    already_used_ids = set(
        UserMessage.objects.filter(
            message_id__in=recent_message_ids, user_profile=user_profile
        ).values_list("message_id", flat=True)
    )

    # Exclude the already-used ids and sort the remainder.
    backfill_message_ids = sorted(recent_message_ids - already_used_ids)

    # Everything but the newest MAX_NUM_ONBOARDING_UNREAD_MESSAGES ids is
    # marked as read, so the new user isn't flooded with unreads.
    older_message_ids = set(backfill_message_ids[:-MAX_NUM_ONBOARDING_UNREAD_MESSAGES])

    # Create the UserMessage rows for the backfill.
    ums_to_create = []
    for message_id in backfill_message_ids:
        um = UserMessage(user_profile=user_profile, message_id=message_id)
        if message_id in older_message_ids:
            um.flags = UserMessage.flags.read
        ums_to_create.append(um)
    UserMessage.objects.bulk_create(ums_to_create)
|
2022-04-14 23:53:15 +02:00
|
|
|
|
|
|
|
|
|
|
|
# Does the processing for a new user account:
# * Subscribes to default/invitation streams
# * Fills in some recent historical messages
# * Notifies other users in realm and Zulip about the signup
# * Deactivates PreregistrationUser objects
def process_new_human_user(
    user_profile: UserProfile,
    prereg_user: Optional[PreregistrationUser] = None,
    default_stream_groups: Sequence[DefaultStreamGroup] = [],
    realm_creation: bool = False,
) -> None:
    """Run all post-creation steps for a newly created human user.

    See the bullet list above for the overall flow; the ordering of the
    steps below matters (stream setup must precede notifications, and
    PreregistrationUser bookkeeping must happen before invite updates).
    """
    # Subscribe to default/invitation streams and
    # fill in some recent historical messages.
    set_up_streams_for_new_human_user(
        user_profile=user_profile,
        prereg_user=prereg_user,
        default_stream_groups=default_stream_groups,
    )

    realm = user_profile.realm
    mit_beta_user = realm.is_zephyr_mirror_realm

    # Let the referrer know their invitation was accepted.
    # mit_beta_users don't have a referred_by field.
    if (
        not mit_beta_user
        and prereg_user is not None
        and prereg_user.referred_by is not None
        and prereg_user.referred_by.is_active
    ):
        # This is a cross-realm direct message (sent by the notification
        # bot in the referrer's realm, in the referrer's language).
        with override_language(prereg_user.referred_by.default_language):
            internal_send_private_message(
                get_system_bot(settings.NOTIFICATION_BOT, prereg_user.referred_by.realm_id),
                prereg_user.referred_by,
                _("{user} accepted your invitation to join Zulip!").format(
                    user=silent_mention_syntax_for_user(user_profile)
                ),
            )

    # For the sake of tracking the history of UserProfiles,
    # we want to tie the newly created user to the PreregistrationUser
    # it was created from.
    if prereg_user is not None:
        prereg_user.status = confirmation_settings.STATUS_USED
        prereg_user.created_user = user_profile
        prereg_user.save(update_fields=["status", "created_user"])

    # Mark any other PreregistrationUsers in the realm that are STATUS_USED as
    # inactive so we can keep track of the PreregistrationUser we
    # actually used for analytics.
    if prereg_user is not None:
        PreregistrationUser.objects.filter(
            email__iexact=user_profile.delivery_email, realm=user_profile.realm
        ).exclude(id=prereg_user.id).update(status=confirmation_settings.STATUS_REVOKED)
    else:
        PreregistrationUser.objects.filter(
            email__iexact=user_profile.delivery_email, realm=user_profile.realm
        ).update(status=confirmation_settings.STATUS_REVOKED)

    if prereg_user is not None and prereg_user.referred_by is not None:
        notify_invites_changed(user_profile.realm, changed_invite_referrer=prereg_user.referred_by)

    notify_new_user(user_profile)
    # Clear any scheduled invitation emails to prevent them
    # from being sent after the user is created.
    clear_scheduled_invitation_emails(user_profile.delivery_email)
    if realm.send_welcome_emails:
        enqueue_welcome_emails(user_profile, realm_creation)

    # Schedule an initial email with the user's
    # new account details and log-in information.
    send_account_registered_email(user_profile, realm_creation)

    # We have an import loop here; it's intentional, because we want
    # to keep all the onboarding code in zerver/lib/onboarding.py.
    from zerver.lib.onboarding import send_initial_direct_message

    send_initial_direct_message(user_profile)
|
2022-04-14 23:53:15 +02:00
|
|
|
|
|
|
|
|
2023-10-18 05:24:50 +02:00
|
|
|
def notify_created_user(user_profile: UserProfile, notify_user_ids: List[int]) -> None:
    """Send "realm_user: add" events announcing a newly created user.

    The audience is partitioned three ways, since different recipients
    may see different data for the new user:
    * users who can see the new user's real (delivery) email,
    * users who can only see a fake/dataless email, and
    * guests who cannot access the new user at all (they receive a
      placeholder "inaccessible user" payload instead).

    When `notify_user_ids` is non-empty, only those users are notified
    (the caller is responsible for ensuring they may access the user);
    otherwise all active users of the realm are considered.
    """
    user_row = user_profile_to_user_row(user_profile)

    format_user_row_kwargs: Dict[str, Any] = {
        "realm_id": user_profile.realm_id,
        "row": user_row,
        # Since we don't know what the client
        # supports at this point in the code, we
        # just assume client_gravatar and
        # user_avatar_url_field_optional = False :(
        "client_gravatar": False,
        "user_avatar_url_field_optional": False,
        # We assume there's no custom profile
        # field data for a new user; initial
        # values are expected to be added in a
        # later event.
        "custom_profile_field_data": {},
    }

    user_ids_without_access_to_created_user: List[int] = []
    users_with_access_to_created_users: List[UserProfile] = []

    if notify_user_ids:
        # This is currently used to send creation event when a guest
        # gains access to a user, so we depend on the caller to make
        # sure that only accessible users receive the user data.
        users_with_access_to_created_users = list(
            user_profile.realm.get_active_users().filter(id__in=notify_user_ids)
        )
    else:
        active_realm_users = list(user_profile.realm.get_active_users())

        # This call to user_access_restricted_in_realm results in
        # one extra query in the user creation codepath to check
        # "realm.can_access_all_users_group.name" because we do
        # not prefetch realm and its related fields when fetching
        # PreregistrationUser object.
        if user_access_restricted_in_realm(user_profile):
            for user in active_realm_users:
                if user.is_guest:
                    # This logic assumes that can_access_all_users_group
                    # setting can only be set to EVERYONE or MEMBERS.
                    user_ids_without_access_to_created_user.append(user.id)
                else:
                    users_with_access_to_created_users.append(user)
        else:
            users_with_access_to_created_users = active_realm_users

    user_ids_with_real_email_access = []
    user_ids_without_real_email_access = []

    # Each "person" payload is computed lazily, once per partition; the
    # first qualifying recipient found serves as the acting_user.
    person_for_real_email_access_users = None
    person_for_without_real_email_access_users = None
    for recipient_user in users_with_access_to_created_users:
        if can_access_delivery_email(
            recipient_user, user_profile.id, user_row["email_address_visibility"]
        ):
            user_ids_with_real_email_access.append(recipient_user.id)
            if person_for_real_email_access_users is None:
                # This caller assumes that "format_user_row" only depends on
                # specific value of "acting_user" among users in a realm in
                # email_address_visibility.
                person_for_real_email_access_users = format_user_row(
                    **format_user_row_kwargs,
                    acting_user=recipient_user,
                )
        else:
            user_ids_without_real_email_access.append(recipient_user.id)
            if person_for_without_real_email_access_users is None:
                person_for_without_real_email_access_users = format_user_row(
                    **format_user_row_kwargs,
                    acting_user=recipient_user,
                )

    if user_ids_with_real_email_access:
        assert person_for_real_email_access_users is not None
        event: Dict[str, Any] = dict(
            type="realm_user", op="add", person=person_for_real_email_access_users
        )
        send_event_on_commit(user_profile.realm, event, user_ids_with_real_email_access)

    if user_ids_without_real_email_access:
        assert person_for_without_real_email_access_users is not None
        event = dict(type="realm_user", op="add", person=person_for_without_real_email_access_users)
        send_event_on_commit(user_profile.realm, event, user_ids_without_real_email_access)

    if user_ids_without_access_to_created_user:
        event = dict(
            type="realm_user",
            op="add",
            person=get_data_for_inaccessible_user(user_profile.realm, user_profile.id),
            inaccessible_user=True,
        )
        send_event_on_commit(user_profile.realm, event, user_ids_without_access_to_created_user)
|
|
|
|
|
2022-04-14 23:53:15 +02:00
|
|
|
|
|
|
|
def created_bot_event(user_profile: UserProfile) -> Dict[str, Any]:
    """Build the "realm_bot: add" event payload describing a bot user."""

    def optional_stream_name(stream: Optional[Stream]) -> Optional[str]:
        # Translate a missing stream into None rather than failing.
        return stream.name if stream else None

    bot = dict(
        email=user_profile.email,
        user_id=user_profile.id,
        full_name=user_profile.full_name,
        bot_type=user_profile.bot_type,
        is_active=user_profile.is_active,
        api_key=get_api_key(user_profile),
        default_sending_stream=optional_stream_name(user_profile.default_sending_stream),
        default_events_register_stream=optional_stream_name(
            user_profile.default_events_register_stream
        ),
        default_all_public_streams=user_profile.default_all_public_streams,
        avatar_url=avatar_url(user_profile),
        services=get_service_dicts_for_bot(user_profile.id),
    )

    # Set the owner key only when the bot has an owner.
    # The default bots don't have an owner. So don't
    # set the owner key while reactivating them.
    if user_profile.bot_owner_id is not None:
        bot["owner_id"] = user_profile.bot_owner_id

    return dict(type="realm_bot", op="add", bot=bot)
|
|
|
|
|
|
|
|
|
|
|
|
def notify_created_bot(user_profile: UserProfile) -> None:
    """Send the bot-creation event to the users who can see this bot."""
    bot_event = created_bot_event(user_profile)
    recipient_ids = bot_owner_user_ids(user_profile)
    send_event_on_commit(user_profile.realm, bot_event, recipient_ids)
|
2022-04-14 23:53:15 +02:00
|
|
|
|
|
|
|
|
|
|
|
def do_create_user(
    email: str,
    password: Optional[str],
    realm: Realm,
    full_name: str,
    bot_type: Optional[int] = None,
    role: Optional[int] = None,
    bot_owner: Optional[UserProfile] = None,
    tos_version: Optional[str] = None,
    timezone: str = "",
    avatar_source: str = UserProfile.AVATAR_FROM_GRAVATAR,
    default_language: Optional[str] = None,
    default_sending_stream: Optional[Stream] = None,
    default_events_register_stream: Optional[Stream] = None,
    default_all_public_streams: Optional[bool] = None,
    prereg_user: Optional[PreregistrationUser] = None,
    prereg_realm: Optional[PreregistrationRealm] = None,
    # NOTE(review): mutable default ([]); safe only as long as callees
    # treat it as read-only — confirm process_new_human_user does.
    default_stream_groups: Sequence[DefaultStreamGroup] = [],
    source_profile: Optional[UserProfile] = None,
    realm_creation: bool = False,
    *,
    acting_user: Optional[UserProfile],
    enable_marketing_emails: bool = True,
    email_address_visibility: Optional[int] = None,
) -> UserProfile:
    """Create a new user (human or bot) in `realm`, plus the bookkeeping
    that accompanies user creation: RealmAuditLog rows, analytics
    counts, billing license-ledger updates, system user group
    memberships, client events, and (for humans) onboarding.

    Most parameters are passed through to `create_user`; notable extras:
    * prereg_user / prereg_realm: signup-flow records to link to the
      newly created user.
    * realm_creation: True when this user is the creator of a brand-new
      realm; triggers creator tagging, signup-stream subscription, and
      the initial realm messages.
    * acting_user: who performed the action, for audit logging; when
      falsy, the created user is recorded as acting on their own behalf.

    Returns the newly created UserProfile.
    """
    # NOTE(review): an explicit `with` (rather than decorating the whole
    # function) keeps the transaction short; per the revert of commit
    # 851d68e, a longer transaction widened a cache-fill race where other
    # requests could cache realm user dicts missing the new user.
    with transaction.atomic():
        user_profile = create_user(
            email=email,
            password=password,
            realm=realm,
            full_name=full_name,
            role=role,
            bot_type=bot_type,
            bot_owner=bot_owner,
            tos_version=tos_version,
            timezone=timezone,
            avatar_source=avatar_source,
            default_language=default_language,
            default_sending_stream=default_sending_stream,
            default_events_register_stream=default_events_register_stream,
            default_all_public_streams=default_all_public_streams,
            source_profile=source_profile,
            enable_marketing_emails=enable_marketing_emails,
            email_address_visibility=email_address_visibility,
        )

        event_time = user_profile.date_joined
        # With no explicit actor (e.g. self-signup), audit the creation
        # as performed by the new user themselves.
        if not acting_user:
            acting_user = user_profile
        RealmAuditLog.objects.create(
            realm=user_profile.realm,
            acting_user=acting_user,
            modified_user=user_profile,
            event_type=RealmAuditLog.USER_CREATED,
            event_time=event_time,
            extra_data={
                # Snapshot of per-role user counts at creation time.
                RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
            },
        )
        maybe_enqueue_audit_log_upload(user_profile.realm)

        if realm_creation:
            # If this user just created a realm, make sure they are
            # properly tagged as the creator of the realm.
            realm_creation_audit_log = (
                RealmAuditLog.objects.filter(event_type=RealmAuditLog.REALM_CREATED, realm=realm)
                .order_by("id")
                .last()
            )
            assert realm_creation_audit_log is not None
            realm_creation_audit_log.acting_user = user_profile
            realm_creation_audit_log.save(update_fields=["acting_user"])

        # Record the new active user in the analytics count tables.
        do_increment_logging_stat(
            user_profile.realm,
            COUNT_STATS["active_users_log:is_bot:day"],
            user_profile.is_bot,
            event_time,
        )
        if settings.BILLING_ENABLED:
            billing_session = RealmBillingSession(user=user_profile, realm=user_profile.realm)
            billing_session.update_license_ledger_if_needed(event_time)

        # Every new user joins the system user group for their role;
        # record the membership and audit it.
        system_user_group = get_system_user_group_for_user(user_profile)
        UserGroupMembership.objects.create(user_profile=user_profile, user_group=system_user_group)
        RealmAuditLog.objects.create(
            realm=user_profile.realm,
            modified_user=user_profile,
            modified_user_group=system_user_group,
            event_type=RealmAuditLog.USER_GROUP_DIRECT_USER_MEMBERSHIP_ADDED,
            event_time=event_time,
            acting_user=acting_user,
        )

        # Non-provisional members additionally join the "full members"
        # system group immediately.
        if user_profile.role == UserProfile.ROLE_MEMBER and not user_profile.is_provisional_member:
            full_members_system_group = UserGroup.objects.get(
                name=SystemGroups.FULL_MEMBERS,
                realm=user_profile.realm,
                is_system_group=True,
            )
            UserGroupMembership.objects.create(
                user_profile=user_profile, user_group=full_members_system_group
            )
            RealmAuditLog.objects.create(
                realm=user_profile.realm,
                modified_user=user_profile,
                modified_user_group=full_members_system_group,
                event_type=RealmAuditLog.USER_GROUP_DIRECT_USER_MEMBERSHIP_ADDED,
                event_time=event_time,
                acting_user=acting_user,
            )

    # Note that for bots, the caller will send an additional event
    # with bot-specific info like services.
    notify_created_user(user_profile, [])

    # Tell clients about the user group memberships created above.
    do_send_user_group_members_update_event("add_members", system_user_group, [user_profile.id])
    if user_profile.role == UserProfile.ROLE_MEMBER and not user_profile.is_provisional_member:
        do_send_user_group_members_update_event(
            "add_members", full_members_system_group, [user_profile.id]
        )

    # Link the realm-signup record to the user who completed it.
    if prereg_realm is not None:
        prereg_realm.created_user = user_profile
        prereg_realm.save(update_fields=["created_user"])

    if bot_type is None:
        # Humans get the onboarding flow (bots skip it); the caller is
        # responsible for any bot-specific follow-up.
        process_new_human_user(
            user_profile,
            prereg_user=prereg_user,
            default_stream_groups=default_stream_groups,
            realm_creation=realm_creation,
        )

    if realm_creation:
        assert realm.signup_notifications_stream is not None
        bulk_add_subscriptions(
            realm, [realm.signup_notifications_stream], [user_profile], acting_user=None
        )

        # Imported here, not at module level — presumably to avoid an
        # import cycle with zerver.lib.onboarding; confirm before moving.
        from zerver.lib.onboarding import send_initial_realm_messages

        send_initial_realm_messages(realm)

    return user_profile
|
|
|
|
|
|
|
|
|
|
|
|
def do_activate_mirror_dummy_user(
    user_profile: UserProfile, *, acting_user: Optional[UserProfile]
) -> None:
    """Called to have a user "take over" a "mirror dummy" user
    (i.e. is_mirror_dummy=True) account when they sign up with the
    same email address.

    Essentially, the result should be as though we had created the
    UserProfile just now with do_create_user, except that the mirror
    dummy user may appear as the recipient or sender of messages from
    before their account was fully created.

    TODO: This function likely has bugs resulting from this being a
    parallel code path to do_create_user; e.g. it likely does not
    handle preferences or default streams properly.
    """
    with transaction.atomic():
        change_user_is_active(user_profile, True)
        user_profile.is_mirror_dummy = False
        # Clear any password left over from the dummy state; the account
        # becomes real only now, so date_joined is reset to the present.
        user_profile.set_unusable_password()
        user_profile.date_joined = timezone_now()
        user_profile.tos_version = settings.TERMS_OF_SERVICE_VERSION
        user_profile.save(
            update_fields=["date_joined", "password", "is_mirror_dummy", "tos_version"]
        )

        event_time = user_profile.date_joined
        RealmAuditLog.objects.create(
            realm=user_profile.realm,
            modified_user=user_profile,
            acting_user=acting_user,
            event_type=RealmAuditLog.USER_ACTIVATED,
            event_time=event_time,
            extra_data={
                # Snapshot of per-role user counts at activation time.
                RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
            },
        )
        maybe_enqueue_audit_log_upload(user_profile.realm)
        # Record the newly active user in the analytics count tables.
        do_increment_logging_stat(
            user_profile.realm,
            COUNT_STATS["active_users_log:is_bot:day"],
            user_profile.is_bot,
            event_time,
        )
        if settings.BILLING_ENABLED:
            billing_session = RealmBillingSession(user=user_profile, realm=user_profile.realm)
            billing_session.update_license_ledger_if_needed(event_time)

    # Announce the user to clients; mirrors the call in do_create_user.
    notify_created_user(user_profile, [])
|
2022-04-14 23:53:15 +02:00
|
|
|
|
|
|
|
|
2022-08-15 15:54:50 +02:00
|
|
|
@transaction.atomic(savepoint=False)
def do_reactivate_user(user_profile: UserProfile, *, acting_user: Optional[UserProfile]) -> None:
    """Reactivate a user that had previously been deactivated.

    Records the reactivation in RealmAuditLog and the analytics count
    tables, updates the billing license ledger if enabled, reassigns an
    orphaned bot to the acting user, and notifies clients; events are
    deferred until the enclosing transaction commits.

    Raises JsonableError for mirror-dummy ("placeholder") accounts,
    which must instead be claimed via the signup flow
    (do_activate_mirror_dummy_user).
    """
    if user_profile.is_mirror_dummy:
        raise JsonableError(
            _("Cannot activate a placeholder account; ask the user to sign up, instead.")
        )
    change_user_is_active(user_profile, True)

    event_time = timezone_now()
    RealmAuditLog.objects.create(
        realm=user_profile.realm,
        modified_user=user_profile,
        acting_user=acting_user,
        event_type=RealmAuditLog.USER_REACTIVATED,
        event_time=event_time,
        extra_data={
            # Snapshot of per-role user counts at reactivation time.
            RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
        },
    )
    maybe_enqueue_audit_log_upload(user_profile.realm)

    # If we are reactivating a bot whose owner is deactivated, hand the
    # bot over to whoever is reactivating it, so it doesn't come back
    # without an active owner.
    bot_owner_changed = False
    if (
        user_profile.is_bot
        and user_profile.bot_owner is not None
        and not user_profile.bot_owner.is_active
        and acting_user is not None
    ):
        previous_owner = user_profile.bot_owner
        user_profile.bot_owner = acting_user
        user_profile.save()  # Can't use update_fields because of how the foreign key works.
        RealmAuditLog.objects.create(
            realm=user_profile.realm,
            acting_user=acting_user,
            modified_user=user_profile,
            event_type=RealmAuditLog.USER_BOT_OWNER_CHANGED,
            event_time=event_time,
        )
        bot_owner_changed = True

    # Record the newly active user in the analytics count tables.
    do_increment_logging_stat(
        user_profile.realm,
        COUNT_STATS["active_users_log:is_bot:day"],
        user_profile.is_bot,
        event_time,
    )
    if settings.BILLING_ENABLED:
        billing_session = RealmBillingSession(user=user_profile, realm=user_profile.realm)
        billing_session.update_license_ledger_if_needed(event_time)

    event = dict(
        type="realm_user", op="update", person=dict(user_id=user_profile.id, is_active=True)
    )
    send_event_on_commit(user_profile.realm, event, active_user_ids(user_profile.realm_id))

    if user_profile.is_bot:
        event = dict(
            type="realm_bot",
            op="update",
            bot=dict(
                user_id=user_profile.id,
                is_active=True,
            ),
        )
        send_event_on_commit(user_profile.realm, event, bot_owner_user_ids(user_profile))

    # Previously two separate `if bot_owner_changed:` blocks, each with
    # its own deferred import; merged into one, preserving the original
    # call order. Imported here, not at module level — presumably to
    # avoid an import cycle with zerver.actions.bots; confirm before
    # moving.
    if bot_owner_changed:
        from zerver.actions.bots import (
            remove_bot_from_inaccessible_private_streams,
            send_bot_owner_update_events,
        )

        assert acting_user is not None
        send_bot_owner_update_events(user_profile, acting_user, previous_owner)
        # Per its name, this removes the bot from private streams its new
        # owner cannot access.
        remove_bot_from_inaccessible_private_streams(user_profile, acting_user=acting_user)

    # The user's stream subscriptions were preserved while deactivated;
    # tell peer subscribers the user is "back" in each still-active
    # stream they are subscribed to.
    subscribed_recipient_ids = Subscription.objects.filter(
        user_profile_id=user_profile.id, active=True, recipient__type=Recipient.STREAM
    ).values_list("recipient__type_id", flat=True)
    subscribed_streams = Stream.objects.filter(id__in=subscribed_recipient_ids, deactivated=False)
    subscriber_peer_info = bulk_get_subscriber_peer_info(
        realm=user_profile.realm,
        streams=subscribed_streams,
    )

    altered_user_dict: Dict[int, Set[int]] = defaultdict(set)
    for stream in subscribed_streams:
        altered_user_dict[stream.id] = {user_profile.id}

    stream_dict = {stream.id: stream for stream in subscribed_streams}

    send_peer_subscriber_events(
        op="peer_add",
        realm=user_profile.realm,
        altered_user_dict=altered_user_dict,
        stream_dict=stream_dict,
        subscriber_peer_info=subscriber_peer_info,
    )
|