2022-04-14 23:53:15 +02:00
|
|
|
from collections import defaultdict
|
2024-07-12 02:30:25 +02:00
|
|
|
from collections.abc import Iterable, Sequence
|
2023-11-19 19:45:19 +01:00
|
|
|
from datetime import timedelta
|
2024-07-12 02:30:25 +02:00
|
|
|
from typing import Any
|
2022-04-14 23:53:15 +02:00
|
|
|
|
|
|
|
from django.conf import settings
|
|
|
|
from django.db import transaction
|
2024-06-27 15:06:09 +02:00
|
|
|
from django.db.models import F
|
2022-04-14 23:53:15 +02:00
|
|
|
from django.utils.timezone import now as timezone_now
|
|
|
|
from django.utils.translation import gettext as _
|
|
|
|
from django.utils.translation import override as override_language
|
|
|
|
|
|
|
|
from confirmation import settings as confirmation_settings
|
2024-04-01 10:59:05 +02:00
|
|
|
from zerver.actions.message_send import (
|
2024-07-04 14:05:48 +02:00
|
|
|
internal_send_group_direct_message,
|
2024-04-01 10:59:05 +02:00
|
|
|
internal_send_private_message,
|
|
|
|
internal_send_stream_message,
|
|
|
|
)
|
2022-04-14 23:53:15 +02:00
|
|
|
from zerver.actions.streams import bulk_add_subscriptions, send_peer_subscriber_events
|
|
|
|
from zerver.actions.user_groups import do_send_user_group_members_update_event
|
|
|
|
from zerver.actions.users import change_user_is_active, get_service_dicts_for_bot
|
|
|
|
from zerver.lib.avatar import avatar_url
|
|
|
|
from zerver.lib.create_user import create_user
|
2023-07-09 13:30:19 +02:00
|
|
|
from zerver.lib.default_streams import get_slim_realm_default_streams
|
2023-06-30 13:27:25 +02:00
|
|
|
from zerver.lib.email_notifications import enqueue_welcome_emails, send_account_registered_email
|
2023-09-25 21:58:33 +02:00
|
|
|
from zerver.lib.exceptions import JsonableError
|
2024-04-30 22:57:29 +02:00
|
|
|
from zerver.lib.invites import notify_invites_changed
|
2022-04-14 23:53:15 +02:00
|
|
|
from zerver.lib.mention import silent_mention_syntax_for_user
|
2023-12-09 15:00:30 +01:00
|
|
|
from zerver.lib.remote_server import maybe_enqueue_audit_log_upload
|
2022-04-14 23:53:15 +02:00
|
|
|
from zerver.lib.send_email import clear_scheduled_invitation_emails
|
|
|
|
from zerver.lib.stream_subscription import bulk_get_subscriber_peer_info
|
2024-02-09 20:59:56 +01:00
|
|
|
from zerver.lib.streams import can_access_stream_history
|
2022-04-14 23:53:15 +02:00
|
|
|
from zerver.lib.user_counts import realm_user_count, realm_user_count_by_role
|
|
|
|
from zerver.lib.user_groups import get_system_user_group_for_user
|
2021-12-11 08:17:57 +01:00
|
|
|
from zerver.lib.users import (
|
|
|
|
can_access_delivery_email,
|
|
|
|
format_user_row,
|
|
|
|
get_api_key,
|
2023-11-03 04:39:40 +01:00
|
|
|
get_data_for_inaccessible_user,
|
|
|
|
user_access_restricted_in_realm,
|
2021-12-11 08:17:57 +01:00
|
|
|
user_profile_to_user_row,
|
|
|
|
)
|
2022-04-14 23:53:15 +02:00
|
|
|
from zerver.models import (
|
|
|
|
DefaultStreamGroup,
|
|
|
|
Message,
|
2024-04-17 05:45:32 +02:00
|
|
|
NamedUserGroup,
|
2024-06-28 13:40:08 +02:00
|
|
|
OnboardingStep,
|
2024-06-21 12:50:32 +02:00
|
|
|
OnboardingUserMessage,
|
2023-03-10 11:42:18 +01:00
|
|
|
PreregistrationRealm,
|
2022-04-14 23:53:15 +02:00
|
|
|
PreregistrationUser,
|
|
|
|
Realm,
|
|
|
|
RealmAuditLog,
|
|
|
|
Recipient,
|
|
|
|
Stream,
|
|
|
|
Subscription,
|
|
|
|
UserGroupMembership,
|
|
|
|
UserMessage,
|
|
|
|
UserProfile,
|
|
|
|
)
|
2023-12-15 01:55:59 +01:00
|
|
|
from zerver.models.groups import SystemGroups
|
2024-08-30 18:15:41 +02:00
|
|
|
from zerver.models.realm_audit_logs import AuditLogEventType
|
2023-12-15 01:16:00 +01:00
|
|
|
from zerver.models.users import active_user_ids, bot_owner_user_ids, get_system_bot
|
django_api: Extract send_event_on_commit helper.
django-stubs 4.2.1 gives transaction.on_commit a more accurate type
annotation, but this exposed that mypy can’t handle the lambda default
parameters that we use to recapture loop variables such as
for stream_id in public_stream_ids:
peer_user_ids = …
event = …
transaction.on_commit(
lambda event=event, peer_user_ids=peer_user_ids: send_event(
realm, event, peer_user_ids
)
)
https://github.com/python/mypy/issues/15459
A workaround that mypy accepts is
transaction.on_commit(
(
lambda event, peer_user_ids: lambda: send_event(
realm, event, peer_user_ids
)
)(event, peer_user_ids)
)
But that’s kind of ugly and potentially error-prone, so let’s make a
helper function for this very common pattern.
send_event_on_commit(realm, event, peer_user_ids)
Signed-off-by: Anders Kaseorg <anders@zulip.com>
2023-06-17 20:53:07 +02:00
|
|
|
from zerver.tornado.django_api import send_event_on_commit
|
2022-04-14 23:53:15 +02:00
|
|
|
|
|
|
|
if settings.BILLING_ENABLED:
|
2023-12-05 21:09:28 +01:00
|
|
|
from corporate.lib.stripe import RealmBillingSession
|
2022-04-14 23:53:15 +02:00
|
|
|
|
|
|
|
|
2024-06-21 09:31:18 +02:00
|
|
|
# Upper bound on how many recent stream messages we backfill into a
# new user's personal message history (see add_new_user_history).
MAX_NUM_RECENT_MESSAGES = 1000
# Of the backfilled messages, at most this many are left unread; the
# rest are marked as read (only applies to realms without tracked
# OnboardingUserMessage rows).
MAX_NUM_RECENT_UNREAD_MESSAGES = 20

# We don't want to mark years-old messages as unread, since that might
# feel like Zulip is buggy, but in low-traffic or bursty-traffic
# organizations, it's reasonable for the most recent 20 messages to be
# several weeks old and still be a good place to start.
RECENT_MESSAGES_TIMEDELTA = timedelta(weeks=12)
|
2022-04-14 23:53:15 +02:00
|
|
|
|
|
|
|
|
|
|
|
def send_message_to_signup_notification_stream(
    sender: UserProfile, realm: Realm, message: str
) -> None:
    """Post `message` to the realm's signup announcements stream, if one
    is configured; otherwise, do nothing."""
    stream = realm.get_signup_announcements_stream()
    if stream is None:
        # Signup announcements are disabled for this realm.
        return

    # The topic name is localized into the realm's default language.
    with override_language(realm.default_language):
        topic_name = _("signups")

    internal_send_stream_message(sender, stream, topic_name, message)
|
2022-04-14 23:53:15 +02:00
|
|
|
|
|
|
|
|
2024-04-01 10:59:05 +02:00
|
|
|
def send_group_direct_message_to_admins(sender: UserProfile, realm: Realm, content: str) -> None:
    """Send a group direct message from `sender` to all human
    administrators of `realm`."""
    admin_users = list(realm.get_human_admin_users())
    internal_send_group_direct_message(realm, sender, content, recipient_users=admin_users)
|
|
|
|
|
|
|
|
|
2022-04-14 23:53:15 +02:00
|
|
|
def notify_new_user(user_profile: UserProfile) -> None:
    """Announce a newly registered user in the realm's signup
    announcements stream.

    When billing is enabled and licenses are running low, additionally
    send the announcement (with a licenses-low warning appended) to the
    realm's administrators as a group direct message.

    The very first user of a realm is not announced, since there is
    nobody to announce them to.
    """
    user_count = realm_user_count(user_profile.realm)
    sender_email = settings.NOTIFICATION_BOT
    sender = get_system_bot(sender_email, user_profile.realm_id)

    is_first_user = user_count == 1
    if not is_first_user:
        with override_language(user_profile.realm.default_language):
            message = _("{user} joined this organization.").format(
                user=silent_mention_syntax_for_user(user_profile), user_count=user_count
            )
        send_message_to_signup_notification_stream(sender, user_profile.realm, message)

        if settings.BILLING_ENABLED:
            # Imported here to avoid a hard dependency on the corporate
            # app when billing is disabled.
            from corporate.lib.registration import generate_licenses_low_warning_message_if_required

            licenses_low_warning_message = generate_licenses_low_warning_message_if_required(
                user_profile.realm
            )
            if licenses_low_warning_message is not None:
                # Append the warning to the announcement text and send
                # the combined message to the admins as a group DM.
                message += "\n"
                message += licenses_low_warning_message
                send_group_direct_message_to_admins(sender, user_profile.realm, message)
|
2022-04-14 23:53:15 +02:00
|
|
|
|
|
|
|
|
2023-07-09 11:42:07 +02:00
|
|
|
def set_up_streams_for_new_human_user(
    *,
    user_profile: UserProfile,
    prereg_user: PreregistrationUser | None = None,
    default_stream_groups: Sequence[DefaultStreamGroup] = [],
    add_initial_stream_subscriptions: bool = True,
    realm_creation: bool = False,
) -> None:
    """Subscribe a newly created human user to their initial streams and
    backfill message history for those streams.

    The initial stream set is the union of the streams on the
    invitation (`prereg_user.streams`), the realm's default streams
    (when applicable), and any selected default stream groups — unless
    `add_initial_stream_subscriptions` is False, in which case the user
    is subscribed to nothing.
    """
    realm = user_profile.realm

    if prereg_user is not None:
        streams: list[Stream] = list(prereg_user.streams.all())
        # Attribute the subscriptions to the user who sent the invitation.
        acting_user: UserProfile | None = prereg_user.referred_by

        # A PreregistrationUser should not be used for another UserProfile
        assert prereg_user.created_user is None, "PregistrationUser should not be reused"
    else:
        streams = []
        acting_user = None

    if add_initial_stream_subscriptions:
        # If prereg_user.include_realm_default_subscriptions is true, we
        # add the default streams for the realm to the list of streams.
        # Note that we are fine with "slim" Stream objects for calling
        # bulk_add_subscriptions and add_new_user_history, which we verify
        # in StreamSetupTest tests that check query counts.
        if prereg_user is None or prereg_user.include_realm_default_subscriptions:
            default_streams = get_slim_realm_default_streams(realm.id)
            # Set-union deduplicates streams present in both lists.
            streams = list(set(streams) | set(default_streams))

        for default_stream_group in default_stream_groups:
            default_stream_group_streams = default_stream_group.streams.all()
            for stream in default_stream_group_streams:
                if stream not in streams:
                    streams.append(stream)
    else:
        streams = []

    bulk_add_subscriptions(
        realm,
        streams,
        [user_profile],
        from_user_creation=True,
        acting_user=acting_user,
    )

    add_new_user_history(user_profile, streams, realm_creation=realm_creation)
|
2023-07-09 11:42:07 +02:00
|
|
|
|
|
|
|
|
2024-06-27 15:40:05 +02:00
|
|
|
def add_new_user_history(
    user_profile: UserProfile,
    streams: Iterable[Stream],
    *,
    realm_creation: bool = False,
) -> None:
    """
    Give the user some messages in their feed, so that they can learn
    how to use the home view in a realistic way.

    For realms having older onboarding messages, mark the very
    most recent messages as unread. Otherwise, ONLY mark the
    messages tracked in 'OnboardingUserMessage' as unread.
    """
    realm = user_profile.realm
    # Find recipient ids for the user's streams, limiting to just
    # those where we can access the streams' full history.
    #
    # TODO: This will do database queries in a loop if many private
    # streams are involved.
    recipient_ids = [
        stream.recipient_id for stream in streams if can_access_stream_history(user_profile, stream)
    ]

    # Start by finding recent messages matching those recipients.
    cutoff_date = timezone_now() - RECENT_MESSAGES_TIMEDELTA
    recent_message_ids = set(
        Message.objects.filter(
            # Uses index: zerver_message_realm_recipient_id
            realm_id=realm.id,
            recipient_id__in=recipient_ids,
            date_sent__gt=cutoff_date,
        )
        .order_by("-id")
        .values_list("id", flat=True)[0:MAX_NUM_RECENT_MESSAGES]
    )

    # Collect the realm's tracked onboarding messages, keeping a map
    # from message id to the OnboardingUserMessage row so we can copy
    # its flags (e.g. starred) onto the new user's UserMessage rows.
    tracked_onboarding_message_ids = set()
    message_id_to_onboarding_user_message = {}
    onboarding_user_messages_queryset = OnboardingUserMessage.objects.filter(realm_id=realm.id)
    for onboarding_user_message in onboarding_user_messages_queryset:
        tracked_onboarding_message_ids.add(onboarding_user_message.message_id)
        message_id_to_onboarding_user_message[onboarding_user_message.message_id] = (
            onboarding_user_message
        )
    tracked_onboarding_messages_exist = len(tracked_onboarding_message_ids) > 0

    message_history_ids = recent_message_ids.union(tracked_onboarding_message_ids)

    if len(message_history_ids) > 0:
        # Handle the race condition where a message arrives between
        # bulk_add_subscriptions above and the recent message query just above
        already_used_ids = set(
            UserMessage.objects.filter(
                message_id__in=recent_message_ids, user_profile=user_profile
            ).values_list("message_id", flat=True)
        )

        # Exclude the already-used ids and sort them.
        backfill_message_ids = sorted(message_history_ids - already_used_ids)

        # Find which message ids we should mark as read.
        # (We don't want too many unread messages.)
        older_message_ids = set()
        if not tracked_onboarding_messages_exist:
            # Everything except the newest MAX_NUM_RECENT_UNREAD_MESSAGES
            # gets marked as read.
            older_message_ids = set(backfill_message_ids[:-MAX_NUM_RECENT_UNREAD_MESSAGES])

        # Create UserMessage rows for the backfill.
        ums_to_create = []
        for message_id in backfill_message_ids:
            um = UserMessage(user_profile=user_profile, message_id=message_id)
            # Only onboarding messages are available for realm creator.
            # They are not marked as historical.
            if not realm_creation:
                um.flags = UserMessage.flags.historical
            if tracked_onboarding_messages_exist:
                # Tracked onboarding messages stay unread (and inherit
                # the starred flag); everything else is marked read.
                if message_id not in tracked_onboarding_message_ids:
                    um.flags |= UserMessage.flags.read
                elif message_id_to_onboarding_user_message[message_id].flags.starred.is_set:
                    um.flags |= UserMessage.flags.starred
            elif message_id in older_message_ids:
                um.flags |= UserMessage.flags.read

            ums_to_create.append(um)

        UserMessage.objects.bulk_create(ums_to_create)
|
2022-04-14 23:53:15 +02:00
|
|
|
|
|
|
|
|
|
|
|
# Does the processing for a new user account:
# * Subscribes to default/invitation streams
# * Fills in some recent historical messages
# * Notifies other users in realm and Zulip about the signup
# * Deactivates PreregistrationUser objects
# * Mark 'visibility_policy_banner' as read
def process_new_human_user(
    user_profile: UserProfile,
    prereg_user: PreregistrationUser | None = None,
    default_stream_groups: Sequence[DefaultStreamGroup] = [],
    realm_creation: bool = False,
    add_initial_stream_subscriptions: bool = True,
) -> None:
    # subscribe to default/invitation streams and
    # fill in some recent historical messages
    set_up_streams_for_new_human_user(
        user_profile=user_profile,
        prereg_user=prereg_user,
        default_stream_groups=default_stream_groups,
        add_initial_stream_subscriptions=add_initial_stream_subscriptions,
        realm_creation=realm_creation,
    )

    realm = user_profile.realm
    mit_beta_user = realm.is_zephyr_mirror_realm

    # Notify the referrer (if any) that their invitation was accepted.
    # mit_beta_users don't have a referred_by field
    if (
        not mit_beta_user
        and prereg_user is not None
        and prereg_user.referred_by is not None
        and prereg_user.referred_by.is_active
        and prereg_user.notify_referrer_on_join
    ):
        # This is a cross-realm direct message.
        with override_language(prereg_user.referred_by.default_language):
            internal_send_private_message(
                get_system_bot(settings.NOTIFICATION_BOT, prereg_user.referred_by.realm_id),
                prereg_user.referred_by,
                _("{user} accepted your invitation to join Zulip!").format(
                    user=silent_mention_syntax_for_user(user_profile)
                ),
            )

    # For the sake of tracking the history of UserProfiles,
    # we want to tie the newly created user to the PreregistrationUser
    # it was created from.
    if prereg_user is not None:
        prereg_user.status = confirmation_settings.STATUS_USED
        prereg_user.created_user = user_profile
        prereg_user.save(update_fields=["status", "created_user"])

    # Mark any other PreregistrationUsers in the realm that are STATUS_USED as
    # inactive so we can keep track of the PreregistrationUser we
    # actually used for analytics.
    if prereg_user is not None:
        PreregistrationUser.objects.filter(
            email__iexact=user_profile.delivery_email, realm=user_profile.realm
        ).exclude(id=prereg_user.id).update(status=confirmation_settings.STATUS_REVOKED)
    else:
        PreregistrationUser.objects.filter(
            email__iexact=user_profile.delivery_email, realm=user_profile.realm
        ).update(status=confirmation_settings.STATUS_REVOKED)

    if prereg_user is not None and prereg_user.referred_by is not None:
        notify_invites_changed(user_profile.realm, changed_invite_referrer=prereg_user.referred_by)

    notify_new_user(user_profile)
    # Clear any scheduled invitation emails to prevent them
    # from being sent after the user is created.
    clear_scheduled_invitation_emails(user_profile.delivery_email)
    if realm.send_welcome_emails:
        enqueue_welcome_emails(user_profile, realm_creation)

    # Schedule an initial email with the user's
    # new account details and log-in information.
    send_account_registered_email(user_profile, realm_creation)

    # We have an import loop here; it's intentional, because we want
    # to keep all the onboarding code in zerver/lib/onboarding.py.
    from zerver.lib.onboarding import send_initial_direct_message

    # Star the welcome DM so it stands out in the new user's feed.
    message_id = send_initial_direct_message(user_profile)
    UserMessage.objects.filter(user_profile=user_profile, message_id=message_id).update(
        flags=F("flags").bitor(UserMessage.flags.starred)
    )

    # The 'visibility_policy_banner' is only displayed to existing users.
    # Mark it as read for a new user.
    OnboardingStep.objects.create(user=user_profile, onboarding_step="visibility_policy_banner")
|
|
|
|
|
2022-04-14 23:53:15 +02:00
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
def notify_created_user(user_profile: UserProfile, notify_user_ids: list[int]) -> None:
    """Send "realm_user" / "add" events announcing `user_profile`.

    When `notify_user_ids` is non-empty, events go only to those users;
    otherwise, they go to all active users in the realm. Recipients are
    partitioned into three groups, each getting a differently shaped
    person payload: users who may see the new user's real (delivery)
    email, users who may not, and (in realms with restricted user
    access) guests who cannot access the new user at all.
    """
    user_row = user_profile_to_user_row(user_profile)

    format_user_row_kwargs: dict[str, Any] = {
        "realm_id": user_profile.realm_id,
        "row": user_row,
        # Since we don't know what the client
        # supports at this point in the code, we
        # just assume client_gravatar and
        # user_avatar_url_field_optional = False :(
        "client_gravatar": False,
        "user_avatar_url_field_optional": False,
        # We assume there's no custom profile
        # field data for a new user; initial
        # values are expected to be added in a
        # later event.
        "custom_profile_field_data": {},
    }

    user_ids_without_access_to_created_user: list[int] = []
    users_with_access_to_created_users: list[UserProfile] = []

    if notify_user_ids:
        # This is currently used to send creation event when a guest
        # gains access to a user, so we depend on the caller to make
        # sure that only accessible users receive the user data.
        users_with_access_to_created_users = list(
            user_profile.realm.get_active_users().filter(id__in=notify_user_ids)
        )
    else:
        active_realm_users = list(user_profile.realm.get_active_users())

        # This call to user_access_restricted_in_realm results in
        # one extra query in the user creation codepath to check
        # "realm.can_access_all_users_group.name" because we do
        # not prefetch realm and its related fields when fetching
        # PreregistrationUser object.
        if user_access_restricted_in_realm(user_profile):
            for user in active_realm_users:
                if user.is_guest:
                    # This logic assumes that can_access_all_users_group
                    # setting can only be set to EVERYONE or MEMBERS.
                    user_ids_without_access_to_created_user.append(user.id)
                else:
                    users_with_access_to_created_users.append(user)
        else:
            users_with_access_to_created_users = active_realm_users

    user_ids_with_real_email_access = []
    user_ids_without_real_email_access = []

    # Each person payload is computed at most once, using the first
    # recipient in the group as the acting user.
    person_for_real_email_access_users = None
    person_for_without_real_email_access_users = None
    for recipient_user in users_with_access_to_created_users:
        if can_access_delivery_email(
            recipient_user, user_profile.id, user_row["email_address_visibility"]
        ):
            user_ids_with_real_email_access.append(recipient_user.id)
            if person_for_real_email_access_users is None:
                # This caller assumes that "format_user_row" only depends on
                # specific value of "acting_user" among users in a realm in
                # email_address_visibility.
                person_for_real_email_access_users = format_user_row(
                    **format_user_row_kwargs,
                    acting_user=recipient_user,
                )
        else:
            user_ids_without_real_email_access.append(recipient_user.id)
            if person_for_without_real_email_access_users is None:
                person_for_without_real_email_access_users = format_user_row(
                    **format_user_row_kwargs,
                    acting_user=recipient_user,
                )

    if user_ids_with_real_email_access:
        assert person_for_real_email_access_users is not None
        event: dict[str, Any] = dict(
            type="realm_user", op="add", person=person_for_real_email_access_users
        )
        send_event_on_commit(user_profile.realm, event, user_ids_with_real_email_access)

    if user_ids_without_real_email_access:
        assert person_for_without_real_email_access_users is not None
        event = dict(type="realm_user", op="add", person=person_for_without_real_email_access_users)
        send_event_on_commit(user_profile.realm, event, user_ids_without_real_email_access)

    if user_ids_without_access_to_created_user:
        # Guests without access only learn that an (inaccessible) user
        # with this id exists.
        event = dict(
            type="realm_user",
            op="add",
            person=get_data_for_inaccessible_user(user_profile.realm, user_profile.id),
            inaccessible_user=True,
        )
        send_event_on_commit(user_profile.realm, event, user_ids_without_access_to_created_user)
|
|
|
|
|
2022-04-14 23:53:15 +02:00
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
def created_bot_event(user_profile: UserProfile) -> dict[str, Any]:
    """Build the "realm_bot" / "add" event payload describing this bot."""

    def name_or_none(stream: Stream | None) -> str | None:
        # A bot may have no default stream configured.
        return stream.name if stream else None

    sending_stream_name = name_or_none(user_profile.default_sending_stream)
    events_register_stream_name = name_or_none(user_profile.default_events_register_stream)

    bot = dict(
        email=user_profile.email,
        user_id=user_profile.id,
        full_name=user_profile.full_name,
        bot_type=user_profile.bot_type,
        is_active=user_profile.is_active,
        api_key=get_api_key(user_profile),
        default_sending_stream=sending_stream_name,
        default_events_register_stream=events_register_stream_name,
        default_all_public_streams=user_profile.default_all_public_streams,
        avatar_url=avatar_url(user_profile),
        services=get_service_dicts_for_bot(user_profile.id),
    )

    # Set the owner key only when the bot has an owner; the default
    # bots don't have an owner, so don't set the owner key while
    # reactivating them.
    if user_profile.bot_owner_id is not None:
        bot["owner_id"] = user_profile.bot_owner_id

    return dict(type="realm_bot", op="add", bot=bot)
|
|
|
|
|
|
|
|
|
|
|
|
def notify_created_bot(user_profile: UserProfile) -> None:
    """Announce a newly created bot to the users who can see it (its
    owner and realm administrators, per bot_owner_user_ids)."""
    event = created_bot_event(user_profile)
    recipient_ids = bot_owner_user_ids(user_profile)
    send_event_on_commit(user_profile.realm, event, recipient_ids)
|
2022-04-14 23:53:15 +02:00
|
|
|
|
|
|
|
|
|
|
|
def do_create_user(
    email: str,
    password: str | None,
    realm: Realm,
    full_name: str,
    bot_type: int | None = None,
    role: int | None = None,
    bot_owner: UserProfile | None = None,
    tos_version: str | None = None,
    timezone: str = "",
    avatar_source: str = UserProfile.AVATAR_FROM_GRAVATAR,
    default_language: str | None = None,
    default_sending_stream: Stream | None = None,
    default_events_register_stream: Stream | None = None,
    default_all_public_streams: bool | None = None,
    prereg_user: PreregistrationUser | None = None,
    prereg_realm: PreregistrationRealm | None = None,
    # NOTE(review): a shared mutable default; safe only as long as no
    # callee mutates it — confirm it is treated as read-only.
    default_stream_groups: Sequence[DefaultStreamGroup] = [],
    source_profile: UserProfile | None = None,
    realm_creation: bool = False,
    *,
    acting_user: UserProfile | None,
    enable_marketing_emails: bool = True,
    email_address_visibility: int | None = None,
    add_initial_stream_subscriptions: bool = True,
) -> UserProfile:
    """Create a user account (human or bot) plus all required bookkeeping.

    Inside a single transaction this creates the UserProfile row, writes
    USER_CREATED and group-membership audit-log entries, updates the
    billing license ledger when billing is enabled, and adds the user to
    the appropriate system user groups.  After that it sends client
    events, links any PreregistrationRealm, posts initial realm messages
    for a newly created realm, and runs the human-only onboarding path.

    Returns the newly created UserProfile.  ``acting_user`` may be None,
    in which case the new user is recorded as their own creator in the
    audit log.
    """
    # Keep row creation, audit logging, and group memberships atomic so
    # a failure part-way through leaves no half-created account.
    with transaction.atomic():
        user_profile = create_user(
            email=email,
            password=password,
            realm=realm,
            full_name=full_name,
            role=role,
            bot_type=bot_type,
            bot_owner=bot_owner,
            tos_version=tos_version,
            timezone=timezone,
            avatar_source=avatar_source,
            default_language=default_language,
            default_sending_stream=default_sending_stream,
            default_events_register_stream=default_events_register_stream,
            default_all_public_streams=default_all_public_streams,
            source_profile=source_profile,
            enable_marketing_emails=enable_marketing_emails,
            email_address_visibility=email_address_visibility,
        )

        event_time = user_profile.date_joined
        # With no explicit actor (e.g. self-signup), attribute the
        # creation to the new user themselves.
        if not acting_user:
            acting_user = user_profile
        RealmAuditLog.objects.create(
            realm=user_profile.realm,
            acting_user=acting_user,
            modified_user=user_profile,
            event_type=AuditLogEventType.USER_CREATED,
            event_time=event_time,
            extra_data={
                RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
            },
        )
        maybe_enqueue_audit_log_upload(user_profile.realm)

        if realm_creation:
            # If this user just created a realm, make sure they are
            # properly tagged as the creator of the realm.
            realm_creation_audit_log = (
                RealmAuditLog.objects.filter(event_type=RealmAuditLog.REALM_CREATED, realm=realm)
                .order_by("id")
                .last()
            )
            assert realm_creation_audit_log is not None
            realm_creation_audit_log.acting_user = user_profile
            realm_creation_audit_log.save(update_fields=["acting_user"])

        if settings.BILLING_ENABLED:
            # A new seat may change what the realm owes; let the billing
            # session reconcile the license ledger.
            billing_session = RealmBillingSession(user=user_profile, realm=user_profile.realm)
            billing_session.update_license_ledger_if_needed(event_time)

        # Every user belongs to the system group matching their role;
        # record the membership and audit it.
        system_user_group = get_system_user_group_for_user(user_profile)
        UserGroupMembership.objects.create(user_profile=user_profile, user_group=system_user_group)
        RealmAuditLog.objects.create(
            realm=user_profile.realm,
            modified_user=user_profile,
            modified_user_group=system_user_group,
            event_type=RealmAuditLog.USER_GROUP_DIRECT_USER_MEMBERSHIP_ADDED,
            event_time=event_time,
            acting_user=acting_user,
        )

        # Non-provisional members also join the "full members" system
        # group immediately.
        if user_profile.role == UserProfile.ROLE_MEMBER and not user_profile.is_provisional_member:
            full_members_system_group = NamedUserGroup.objects.get(
                name=SystemGroups.FULL_MEMBERS,
                realm=user_profile.realm,
                is_system_group=True,
            )
            UserGroupMembership.objects.create(
                user_profile=user_profile, user_group=full_members_system_group
            )
            RealmAuditLog.objects.create(
                realm=user_profile.realm,
                modified_user=user_profile,
                modified_user_group=full_members_system_group,
                event_type=RealmAuditLog.USER_GROUP_DIRECT_USER_MEMBERSHIP_ADDED,
                event_time=event_time,
                acting_user=acting_user,
            )

        # Note that for bots, the caller will send an additional event
        # with bot-specific info like services.
        notify_created_user(user_profile, [])

        do_send_user_group_members_update_event("add_members", system_user_group, [user_profile.id])
        if user_profile.role == UserProfile.ROLE_MEMBER and not user_profile.is_provisional_member:
            do_send_user_group_members_update_event(
                "add_members", full_members_system_group, [user_profile.id]
            )

        # Tie the PreregistrationRealm record to the account that
        # completed realm registration.
        if prereg_realm is not None:
            prereg_realm.created_user = user_profile
            prereg_realm.save(update_fields=["created_user"])

    if realm_creation:
        # Imported here, presumably to avoid a circular import at module
        # load time — TODO confirm before moving it to the top of the file.
        from zerver.lib.onboarding import send_initial_realm_messages

        # Render the welcome content in the realm's configured language,
        # not the request/acting user's.
        with override_language(realm.default_language):
            send_initial_realm_messages(realm)

    # Humans get onboarding (welcome emails, initial subscriptions,
    # etc.); bots skip this entirely.
    if bot_type is None:
        process_new_human_user(
            user_profile,
            prereg_user=prereg_user,
            default_stream_groups=default_stream_groups,
            realm_creation=realm_creation,
            add_initial_stream_subscriptions=add_initial_stream_subscriptions,
        )

    return user_profile
|
|
|
|
|
|
|
|
|
|
|
|
def do_activate_mirror_dummy_user(
    user_profile: UserProfile, *, acting_user: UserProfile | None
) -> None:
    """Called to have a user "take over" a "mirror dummy" user
    (i.e. is_mirror_dummy=True) account when they sign up with the
    same email address.

    Essentially, the result should be as though we had created the
    UserProfile just now with do_create_user, except that the mirror
    dummy user may appear as the recipient or sender of messages from
    before their account was fully created.

    TODO: This function likely has bugs resulting from this being a
    parallel code path to do_create_user; e.g. it likely does not
    handle preferences or default streams properly.
    """
    # Activate, audit-log, and bill in one transaction so a partial
    # takeover can never be observed.
    with transaction.atomic():
        change_user_is_active(user_profile, True)
        user_profile.is_mirror_dummy = False
        # Clear any stale credential; the real authentication setup
        # happens via the normal signup flow.
        user_profile.set_unusable_password()
        # Treat the takeover moment as the join date, matching the
        # do_create_user semantics described in the docstring.
        user_profile.date_joined = timezone_now()
        user_profile.tos_version = settings.TERMS_OF_SERVICE_VERSION
        user_profile.save(
            update_fields=["date_joined", "password", "is_mirror_dummy", "tos_version"]
        )

        event_time = user_profile.date_joined
        RealmAuditLog.objects.create(
            realm=user_profile.realm,
            modified_user=user_profile,
            acting_user=acting_user,
            event_type=AuditLogEventType.USER_ACTIVATED,
            event_time=event_time,
            extra_data={
                RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
            },
        )
        maybe_enqueue_audit_log_upload(user_profile.realm)
        if settings.BILLING_ENABLED:
            # Activation adds a billable seat; reconcile the ledger.
            billing_session = RealmBillingSession(user=user_profile, realm=user_profile.realm)
            billing_session.update_license_ledger_if_needed(event_time)

        # Announce the now-real user to clients (empty list mirrors the
        # do_create_user call site).
        notify_created_user(user_profile, [])
|
2022-04-14 23:53:15 +02:00
|
|
|
|
|
|
|
|
2022-08-15 15:54:50 +02:00
|
|
|
@transaction.atomic(savepoint=False)
def do_reactivate_user(user_profile: UserProfile, *, acting_user: UserProfile | None) -> None:
    """Reactivate a user that had previously been deactivated.

    Runs inside the caller's transaction (savepoint=False): flips the
    account active, writes a USER_REACTIVATED audit-log entry, reassigns
    ownership of a bot whose owner is no longer active, updates the
    billing ledger, and schedules the client events (realm_user update,
    realm_bot update, peer_add subscriber events) to be sent on commit.

    Raises JsonableError for mirror-dummy accounts, which must instead
    be taken over via the signup flow (do_activate_mirror_dummy_user).
    """
    if user_profile.is_mirror_dummy:
        raise JsonableError(
            _("Cannot activate a placeholder account; ask the user to sign up, instead.")
        )
    change_user_is_active(user_profile, True)

    event_time = timezone_now()
    RealmAuditLog.objects.create(
        realm=user_profile.realm,
        modified_user=user_profile,
        acting_user=acting_user,
        event_type=AuditLogEventType.USER_REACTIVATED,
        event_time=event_time,
        extra_data={
            RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
        },
    )
    maybe_enqueue_audit_log_upload(user_profile.realm)

    # If this is a bot whose owner is themselves deactivated, transfer
    # ownership to whoever is reactivating the bot.
    bot_owner_changed = False
    if (
        user_profile.is_bot
        and user_profile.bot_owner is not None
        and not user_profile.bot_owner.is_active
        and acting_user is not None
    ):
        previous_owner = user_profile.bot_owner
        user_profile.bot_owner = acting_user
        user_profile.save()  # Can't use update_fields because of how the foreign key works.
        RealmAuditLog.objects.create(
            realm=user_profile.realm,
            acting_user=acting_user,
            modified_user=user_profile,
            event_type=RealmAuditLog.USER_BOT_OWNER_CHANGED,
            event_time=event_time,
        )
        bot_owner_changed = True

    if settings.BILLING_ENABLED:
        # Reactivation adds a billable seat; reconcile the ledger.
        billing_session = RealmBillingSession(user=user_profile, realm=user_profile.realm)
        billing_session.update_license_ledger_if_needed(event_time)

    event = dict(
        type="realm_user", op="update", person=dict(user_id=user_profile.id, is_active=True)
    )
    send_event_on_commit(user_profile.realm, event, active_user_ids(user_profile.realm_id))

    if user_profile.is_bot:
        event = dict(
            type="realm_bot",
            op="update",
            bot=dict(
                user_id=user_profile.id,
                is_active=True,
            ),
        )
        send_event_on_commit(user_profile.realm, event, bot_owner_user_ids(user_profile))

    # Fix: merged two consecutive duplicated `if bot_owner_changed:`
    # blocks (each with its own function-local import) into one; the
    # call order is preserved.  The import stays local, presumably to
    # avoid a circular import — TODO confirm before hoisting.
    if bot_owner_changed:
        from zerver.actions.bots import (
            remove_bot_from_inaccessible_private_streams,
            send_bot_owner_update_events,
        )

        assert acting_user is not None
        send_bot_owner_update_events(user_profile, acting_user, previous_owner)
        remove_bot_from_inaccessible_private_streams(user_profile, acting_user=acting_user)

    # Tell peers on every live stream the user is subscribed to that
    # this subscriber is back.
    subscribed_recipient_ids = Subscription.objects.filter(
        user_profile_id=user_profile.id, active=True, recipient__type=Recipient.STREAM
    ).values_list("recipient__type_id", flat=True)
    subscribed_streams = Stream.objects.filter(id__in=subscribed_recipient_ids, deactivated=False)
    subscriber_peer_info = bulk_get_subscriber_peer_info(
        realm=user_profile.realm,
        streams=subscribed_streams,
    )

    altered_user_dict: dict[int, set[int]] = defaultdict(set)
    for stream in subscribed_streams:
        altered_user_dict[stream.id] = {user_profile.id}

    stream_dict = {stream.id: stream for stream in subscribed_streams}

    send_peer_subscriber_events(
        op="peer_add",
        realm=user_profile.realm,
        altered_user_dict=altered_user_dict,
        stream_dict=stream_dict,
        subscriber_peer_info=subscriber_peer_info,
    )
|