import secrets
from collections import defaultdict
from email.headerregistry import Address
from typing import Any

from django.conf import settings
from django.contrib.auth.tokens import PasswordResetTokenGenerator, default_token_generator
from django.db import transaction
from django.db.models import Q
from django.http import HttpRequest
from django.urls import reverse
from django.utils.http import urlsafe_base64_encode
from django.utils.timezone import now as timezone_now
from django.utils.translation import get_language

from zerver.actions.user_groups import (
    do_send_user_group_members_update_event,
    update_users_in_full_members_system_group,
)
from zerver.lib.avatar import get_avatar_field
from zerver.lib.bot_config import ConfigError, get_bot_config, get_bot_configs, set_bot_config
from zerver.lib.cache import bot_dict_fields
from zerver.lib.create_user import create_user
from zerver.lib.invites import revoke_invites_generated_by_user
from zerver.lib.remote_server import maybe_enqueue_audit_log_upload
from zerver.lib.send_email import FromAddress, clear_scheduled_emails, send_email
from zerver.lib.sessions import delete_user_sessions
from zerver.lib.soft_deactivation import queue_soft_reactivation
from zerver.lib.stream_subscription import bulk_get_subscriber_peer_info
from zerver.lib.stream_traffic import get_streams_traffic
from zerver.lib.streams import (
    get_group_setting_value_dict_for_streams,
    get_streams_for_user,
    stream_to_dict,
)
from zerver.lib.types import AnonymousSettingGroupDict
from zerver.lib.user_counts import realm_user_count_by_role
from zerver.lib.user_groups import get_system_user_group_for_user
from zerver.lib.users import (
    get_active_bots_owned_by_user,
    get_user_ids_who_can_access_user,
    get_users_involved_in_dms_with_target_users,
    user_access_restricted_in_realm,
)
from zerver.models import (
    GroupGroupMembership,
    Message,
    NamedUserGroup,
    Realm,
    RealmAuditLog,
    Recipient,
    Service,
    Stream,
    Subscription,
    UserGroup,
    UserGroupMembership,
    UserProfile,
)
from zerver.models.bots import get_bot_services
from zerver.models.realm_audit_logs import AuditLogEventType
from zerver.models.realms import get_fake_email_domain
from zerver.models.users import (
    active_non_guest_user_ids,
    active_user_ids,
    bot_owner_user_ids,
    get_bot_dicts_in_realm,
    get_user_profile_by_id,
)
from zerver.tornado.django_api import send_event_on_commit


def do_delete_user(user_profile: UserProfile, *, acting_user: UserProfile | None) -> None:
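    # This deletes the account and everything CASCADE reaches (including
    # messages the user sent and direct messages sent to them), then recreates
    # an inactive mirror-dummy placeholder with the same user ID and
    # re-subscribes it to the user's group direct message threads, so those
    # conversations keep a valid participant. See
    # do_delete_user_preserving_messages below for a variant that keeps messages.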
    if user_profile.realm.is_zephyr_mirror_realm:
        raise AssertionError("Deleting zephyr mirror users is not supported")

    do_deactivate_user(user_profile, acting_user=acting_user)

    to_resubscribe_recipient_ids = set(
        Subscription.objects.filter(
            user_profile=user_profile, recipient__type=Recipient.DIRECT_MESSAGE_GROUP
        ).values_list("recipient_id", flat=True)
    )
    user_id = user_profile.id
    realm = user_profile.realm
    date_joined = user_profile.date_joined
    personal_recipient = user_profile.recipient

    with transaction.atomic(durable=True):
        user_profile.delete()
        # Recipient objects don't get deleted through CASCADE, so we need to handle
        # the user's personal recipient manually. This will also delete all Messages pointing
        # to this recipient (all direct messages sent to the user).
        assert personal_recipient is not None
        personal_recipient.delete()
        replacement_user = create_user(
            force_id=user_id,
            email=Address(
                username=f"deleteduser{user_id}", domain=get_fake_email_domain(realm.host)
            ).addr_spec,
            password=None,
            realm=realm,
            full_name=f"Deleted User {user_id}",
            active=False,
            is_mirror_dummy=True,
            force_date_joined=date_joined,
        )
        subs_to_recreate = [
            Subscription(
                user_profile=replacement_user,
                recipient=recipient,
                is_user_active=replacement_user.is_active,
            )
            for recipient in Recipient.objects.filter(id__in=to_resubscribe_recipient_ids)
        ]
        Subscription.objects.bulk_create(subs_to_recreate)

        RealmAuditLog.objects.create(
            realm=replacement_user.realm,
            modified_user=replacement_user,
            acting_user=acting_user,
            event_type=AuditLogEventType.USER_DELETED,
            event_time=timezone_now(),
        )


def do_delete_user_preserving_messages(user_profile: UserProfile) -> None:
    """This is a version of do_delete_user which does not delete messages
    that the user was a participant in, and thus is less potentially
    disruptive to other users.

    The code is a bit tricky, because we want to, at some point, call
    user_profile.delete() to trigger cascading deletions of related
    models - but we need to prevent the cascades from deleting all messages
    sent by the user, to avoid messing up the history of public stream
    conversations that they may have participated in.

    Not recommended for general use due to the following quirks:
    * Does not live-update other clients via `send_event_on_commit`
      about the user's new name, email, or other attributes.
    * Not guaranteed to clear caches containing the deleted user. The
      temporary user may be visible briefly in caches due to the
      UserProfile model's post_save hook.
    * Deletes `acting_user`/`modified_user` entries in RealmAuditLog,
      potentially leading to corruption in audit tables if the user had,
      for example, changed organization-level settings previously.
    * May violate invariants like deleting the only subscriber to a
      stream/group or the last owner in a realm.
    * Will remove MutedUser records for other users who might have
      muted this user.
    * Will destroy Attachment/ArchivedAttachment records for files
      uploaded by the user, making them inaccessible.
    * Will destroy ArchivedMessage records associated with the user,
      making them impossible to restore from backups.
    * Will destroy Reaction/Submessage objects for reactions/poll
      votes done by the user.

    Most of these issues are not relevant for the common case that the
    user being deleted hasn't used Zulip extensively.

    It is possible that a different algorithm, one that overwrote the
    UserProfile's values with RealmUserDefault values and did a targeted
    set of deletions of cascading models (`Subscription`, `UserMessage`,
    `CustomProfileFieldValue`, etc.), would be a cleaner path to a
    high-quality system.

    Other lesser quirks to be aware of:
    * The deleted user will disappear from all "Read receipts"
      displays, as all UserMessage rows will have been deleted.
    * Raw Markdown syntax mentioning the user still contains their
      original name (though modern clients will look up the user via
      `data-user-id` and display the current name). This is hard to
      change, and not important, since nothing prevents other users from
      just typing the user's name in their own messages.
    * Consumes a user ID sequence number, resulting in gaps in the
      space of user IDs that contain actual users.

    """
    if user_profile.realm.is_zephyr_mirror_realm:
        raise AssertionError("Deleting zephyr mirror users is not supported")

    do_deactivate_user(user_profile, acting_user=None)

    user_id = user_profile.id
    personal_recipient = user_profile.recipient
    realm = user_profile.realm
    date_joined = user_profile.date_joined

    with transaction.atomic(durable=True):
        # The strategy is that before calling user_profile.delete(), we need to
        # reassign Messages sent by the user to a dummy user, so that they don't
        # get affected by CASCADE. We cannot yet create a dummy user with .id
        # matching that of the user_profile, so the general scheme is:
        # 1. We create a *temporary* dummy for the initial re-assignment of messages.
        # 2. We delete the UserProfile.
        # 3. We create a replacement dummy user with its id matching what the UserProfile had.
        # 4. This is the intended, final replacement UserProfile, so we re-assign
        #    the messages from step (1) to it and delete the temporary dummy.
        #
        # We also do the same for Subscriptions - while they could be handled like
        # in do_delete_user by re-creating the objects after CASCADE deletion, the code
        # is cleaner by using the same re-assignment approach for them together with Messages.
        random_token = secrets.token_hex(16)
        temp_replacement_user = create_user(
            email=Address(
                username=f"temp_deleteduser{random_token}", domain=get_fake_email_domain(realm.host)
            ).addr_spec,
            password=None,
            realm=realm,
            full_name=f"Deleted User {user_id} (temp)",
            active=False,
            is_mirror_dummy=True,
            force_date_joined=date_joined,
            create_personal_recipient=False,
        )
        # Uses index: zerver_message_realm_sender_recipient (prefix)
        Message.objects.filter(realm_id=realm.id, sender=user_profile).update(
            sender=temp_replacement_user
        )
        Subscription.objects.filter(
            user_profile=user_profile, recipient__type=Recipient.DIRECT_MESSAGE_GROUP
        ).update(user_profile=temp_replacement_user)
        user_profile.delete()

        replacement_user = create_user(
            force_id=user_id,
            email=Address(
                username=f"deleteduser{user_id}", domain=get_fake_email_domain(realm.host)
            ).addr_spec,
            password=None,
            realm=realm,
            full_name=f"Deleted User {user_id}",
            active=False,
            is_mirror_dummy=True,
            force_date_joined=date_joined,
            create_personal_recipient=False,
        )
        # We don't delete the personal recipient to preserve personal messages!
        # Now, the personal recipient belongs to replacement_user, because
        # personal_recipient.type_id is equal to replacement_user.id.
        replacement_user.recipient = personal_recipient
        replacement_user.save(update_fields=["recipient"])

        # Uses index: zerver_message_realm_sender_recipient (prefix)
        Message.objects.filter(realm_id=realm.id, sender=temp_replacement_user).update(
            sender=replacement_user
        )
        Subscription.objects.filter(
            user_profile=temp_replacement_user, recipient__type=Recipient.DIRECT_MESSAGE_GROUP
        ).update(user_profile=replacement_user, is_user_active=replacement_user.is_active)
        temp_replacement_user.delete()

        RealmAuditLog.objects.create(
            realm=replacement_user.realm,
            modified_user=replacement_user,
            acting_user=None,
            event_type=AuditLogEventType.USER_DELETED_PRESERVING_MESSAGES,
            event_time=timezone_now(),
        )


def change_user_is_active(user_profile: UserProfile, value: bool) -> None:
    """
    Helper function for changing the .is_active field. Not meant as a standalone function
    in production code as properly activating/deactivating users requires more steps.
    This changes the is_active value and saves it, while ensuring
    Subscription.is_user_active values are updated in the same db transaction.
    """
    with transaction.atomic(savepoint=False):
        user_profile.is_active = value
        user_profile.save(update_fields=["is_active"])
        Subscription.objects.filter(user_profile=user_profile).update(is_user_active=value)


def send_group_update_event_for_anonymous_group_setting(
    setting_group: UserGroup,
    group_members_dict: dict[int, list[int]],
    group_subgroups_dict: dict[int, list[int]],
    named_group: NamedUserGroup,
    notify_user_ids: list[int],
) -> None:
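    # `setting_group` is an anonymous (unnamed) group serving as the value of
    # one of `named_group`'s permission settings; find which setting points at
    # it and notify clients of its new members/subgroups via a
    # `user_group op=update` event.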
    realm = setting_group.realm
    for setting_name in NamedUserGroup.GROUP_PERMISSION_SETTINGS:
        if getattr(named_group, setting_name + "_id") == setting_group.id:
            new_setting_value = AnonymousSettingGroupDict(
                direct_members=group_members_dict[setting_group.id],
                direct_subgroups=group_subgroups_dict[setting_group.id],
            )
            event = dict(
                type="user_group",
                op="update",
                group_id=named_group.id,
                data={setting_name: new_setting_value},
            )
            send_event_on_commit(realm, event, notify_user_ids)
            return


def send_realm_update_event_for_anonymous_group_setting(
    setting_group: UserGroup,
    group_members_dict: dict[int, list[int]],
    group_subgroups_dict: dict[int, list[int]],
    notify_user_ids: list[int],
) -> None:
    realm = setting_group.realm
    for setting_name in Realm.REALM_PERMISSION_GROUP_SETTINGS:
        if getattr(realm, setting_name + "_id") == setting_group.id:
            new_setting_value = AnonymousSettingGroupDict(
                direct_members=group_members_dict[setting_group.id],
                direct_subgroups=group_subgroups_dict[setting_group.id],
            )
            event = dict(
                type="realm",
                op="update_dict",
                property="default",
                data={setting_name: new_setting_value},
            )
            send_event_on_commit(realm, event, notify_user_ids)
            return


def send_update_events_for_anonymous_group_settings(
    setting_groups: list[UserGroup], realm: Realm, notify_user_ids: list[int]
) -> None:
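    # Batch-fetch the direct members and subgroups of all the anonymous setting
    # groups, then route each group to the appropriate event: a `user_group`
    # update if it backs a NamedUserGroup permission setting, or a realm
    # `update_dict` event if it backs a realm-level permission setting.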
    setting_group_ids = [group.id for group in setting_groups]
    membership = (
        UserGroupMembership.objects.filter(user_group_id__in=setting_group_ids)
        .exclude(user_profile__is_active=False)
        .values_list("user_group_id", "user_profile_id")
    )

    group_membership = GroupGroupMembership.objects.filter(
        supergroup_id__in=setting_group_ids
    ).values_list("subgroup_id", "supergroup_id")

    group_members = defaultdict(list)
    for user_group_id, user_profile_id in membership:
        group_members[user_group_id].append(user_profile_id)

    group_subgroups = defaultdict(list)
    for subgroup_id, supergroup_id in group_membership:
        group_subgroups[supergroup_id].append(subgroup_id)

    group_setting_query = Q()
    for setting_name in NamedUserGroup.GROUP_PERMISSION_SETTINGS:
        group_setting_query |= Q(**{f"{setting_name}__in": setting_group_ids})

    named_groups_using_setting_groups_dict = {}
    named_groups_using_setting_groups = NamedUserGroup.objects.filter(realm=realm).filter(
        group_setting_query
    )
    for group in named_groups_using_setting_groups:
        for setting_name in NamedUserGroup.GROUP_PERMISSION_SETTINGS:
            setting_value_id = getattr(group, setting_name + "_id")
            if setting_value_id in setting_group_ids:
                named_groups_using_setting_groups_dict[setting_value_id] = group

    for setting_group in setting_groups:
        if setting_group.id in named_groups_using_setting_groups_dict:
            named_group = named_groups_using_setting_groups_dict[setting_group.id]
            send_group_update_event_for_anonymous_group_setting(
                setting_group,
                group_members,
                group_subgroups,
                named_group,
                notify_user_ids,
            )
        else:
            send_realm_update_event_for_anonymous_group_setting(
                setting_group,
                group_members,
                group_subgroups,
                notify_user_ids,
            )


def send_events_for_user_deactivation(user_profile: UserProfile) -> None:
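    # When user access is restricted in the realm, only users who can still
    # access the deactivated user get the normal update event; users who cannot
    # access them get user group membership updates instead, and stream peers
    # losing access get a `realm_user op=remove` event.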
    event_deactivate_user = dict(
        type="realm_user",
        op="update",
        person=dict(user_id=user_profile.id, is_active=False),
    )
    realm = user_profile.realm

    if not user_access_restricted_in_realm(user_profile):
        send_event_on_commit(realm, event_deactivate_user, active_user_ids(realm.id))
        return

    non_guest_user_ids = active_non_guest_user_ids(realm.id)
    users_involved_in_dms_dict = get_users_involved_in_dms_with_target_users([user_profile], realm)

    # This code path is parallel to
    # get_subscribers_of_target_user_subscriptions, but can't reuse it
    # because we need to process stream and direct_message_group
    # subscriptions separately.
    deactivated_user_subs = Subscription.objects.filter(
        user_profile=user_profile,
        recipient__type__in=[Recipient.STREAM, Recipient.DIRECT_MESSAGE_GROUP],
        active=True,
    ).values_list("recipient_id", flat=True)
    subscribers_in_deactivated_user_subs = Subscription.objects.filter(
        recipient_id__in=list(deactivated_user_subs),
        recipient__type__in=[Recipient.STREAM, Recipient.DIRECT_MESSAGE_GROUP],
        is_user_active=True,
        active=True,
    ).values_list("recipient__type", "user_profile_id")

    peer_stream_subscribers = set()
    peer_direct_message_group_subscribers = set()
    for recipient_type, user_id in subscribers_in_deactivated_user_subs:
        if recipient_type == Recipient.DIRECT_MESSAGE_GROUP:
            peer_direct_message_group_subscribers.add(user_id)
        else:
            peer_stream_subscribers.add(user_id)

    users_with_access_to_deactivated_user = (
        set(non_guest_user_ids)
        | users_involved_in_dms_dict[user_profile.id]
        | peer_direct_message_group_subscribers
    )
    if users_with_access_to_deactivated_user:
        send_event_on_commit(
            realm, event_deactivate_user, list(users_with_access_to_deactivated_user)
        )

    all_active_user_ids = active_user_ids(realm.id)
    users_without_access_to_deactivated_user = (
        set(all_active_user_ids) - users_with_access_to_deactivated_user
    )
    if users_without_access_to_deactivated_user:
        # Guests who have access to the deactivated user receive the
        # 'realm_user/update' event and can update the user groups
        # data, but guests who cannot access the deactivated user
        # need an explicit 'user_group/remove_members' event to
        # update the user groups data.
        deactivated_user_groups = user_profile.direct_groups.select_related(
            "named_user_group"
        ).order_by("id")
        deactivated_user_named_groups = []
        deactivated_user_setting_groups = []
        for group in deactivated_user_groups:
            if not hasattr(group, "named_user_group"):
                deactivated_user_setting_groups.append(group)
            else:
                deactivated_user_named_groups.append(group)
        for user_group in deactivated_user_named_groups:
            event = dict(
                type="user_group",
                op="remove_members",
                group_id=user_group.id,
                user_ids=[user_profile.id],
            )
            send_event_on_commit(
                user_group.realm, event, list(users_without_access_to_deactivated_user)
            )

        if deactivated_user_setting_groups:
            send_update_events_for_anonymous_group_settings(
                deactivated_user_setting_groups,
                user_profile.realm,
                list(users_without_access_to_deactivated_user),
            )

    users_losing_access_to_deactivated_user = (
        peer_stream_subscribers - users_with_access_to_deactivated_user
    )
    if users_losing_access_to_deactivated_user:
        event_remove_user = dict(
            type="realm_user",
            op="remove",
            person=dict(user_id=user_profile.id, full_name=str(UserProfile.INACCESSIBLE_USER_NAME)),
        )
        send_event_on_commit(
            realm, event_remove_user, list(users_losing_access_to_deactivated_user)
        )


def do_deactivate_user(
    user_profile: UserProfile, _cascade: bool = True, *, acting_user: UserProfile | None
) -> None:
    if not user_profile.is_active:
        return

    if settings.BILLING_ENABLED:
        from corporate.lib.stripe import RealmBillingSession
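        # Imported lazily, presumably so the optional corporate billing code is
        # only required when billing is enabled.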

    if _cascade:
        # We need to deactivate bots before the target user, to ensure
        # that a failure partway through this function cannot result
        # in only the user being deactivated.
        bot_profiles = get_active_bots_owned_by_user(user_profile)
        for profile in bot_profiles:
            do_deactivate_user(profile, _cascade=False, acting_user=acting_user)

    with transaction.atomic(savepoint=False):
        if user_profile.realm.is_zephyr_mirror_realm:  # nocoverage
            # For zephyr mirror users, we need to make them a mirror dummy
            # again; otherwise, other users won't get the correct behavior
            # when trying to send messages to this person inside Zulip.
            #
            # Ideally, we would also ensure their zephyr mirroring bot
            # isn't running, but that's a separate issue.
            user_profile.is_mirror_dummy = True
            user_profile.save(update_fields=["is_mirror_dummy"])

        change_user_is_active(user_profile, False)

        clear_scheduled_emails(user_profile.id)
        revoke_invites_generated_by_user(user_profile)

        event_time = timezone_now()
        RealmAuditLog.objects.create(
            realm=user_profile.realm,
            modified_user=user_profile,
            acting_user=acting_user,
            event_type=AuditLogEventType.USER_DEACTIVATED,
            event_time=event_time,
            extra_data={
                RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
            },
        )
        maybe_enqueue_audit_log_upload(user_profile.realm)
        if settings.BILLING_ENABLED:
            billing_session = RealmBillingSession(user=user_profile, realm=user_profile.realm)
            billing_session.update_license_ledger_if_needed(event_time)

        transaction.on_commit(lambda: delete_user_sessions(user_profile))

        send_events_for_user_deactivation(user_profile)

        if user_profile.is_bot:
            event_deactivate_bot = dict(
                type="realm_bot",
                op="update",
                bot=dict(user_id=user_profile.id, is_active=False),
            )
            send_event_on_commit(
                user_profile.realm, event_deactivate_bot, bot_owner_user_ids(user_profile)
            )


def send_stream_events_for_role_update(
    user_profile: UserProfile, old_accessible_streams: list[Stream]
) -> None:
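    # Diff the streams the user could access before and after the role change,
    # then send stream "create" (plus subscriber peer_add) events for newly
    # accessible streams and stream "delete" events for streams that are no
    # longer accessible.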
    current_accessible_streams = get_streams_for_user(
        user_profile,
        include_all_active=user_profile.is_realm_admin,
        include_web_public=True,
    )

    old_accessible_stream_ids = {stream.id for stream in old_accessible_streams}
    current_accessible_stream_ids = {stream.id for stream in current_accessible_streams}

    now_accessible_stream_ids = current_accessible_stream_ids - old_accessible_stream_ids
    if now_accessible_stream_ids:
        recent_traffic = get_streams_traffic(now_accessible_stream_ids, user_profile.realm)

        now_accessible_streams = [
            stream
            for stream in current_accessible_streams
            if stream.id in now_accessible_stream_ids
        ]

        setting_groups_dict = get_group_setting_value_dict_for_streams(now_accessible_streams)

        event = dict(
            type="stream",
            op="create",
            streams=[
                stream_to_dict(stream, recent_traffic, setting_groups_dict)
                for stream in now_accessible_streams
            ],
        )
        send_event_on_commit(user_profile.realm, event, [user_profile.id])

        subscriber_peer_info = bulk_get_subscriber_peer_info(
            user_profile.realm, now_accessible_streams
        )
        for stream_id, stream_subscriber_set in subscriber_peer_info.subscribed_ids.items():
            peer_add_event = dict(
                type="subscription",
                op="peer_add",
                stream_ids=[stream_id],
                user_ids=sorted(stream_subscriber_set),
            )
            send_event_on_commit(user_profile.realm, peer_add_event, [user_profile.id])

    now_inaccessible_stream_ids = old_accessible_stream_ids - current_accessible_stream_ids
    if now_inaccessible_stream_ids:
        now_inaccessible_streams = [
            stream for stream in old_accessible_streams if stream.id in now_inaccessible_stream_ids
        ]
        event = dict(
            type="stream",
            op="delete",
            streams=[stream_to_dict(stream) for stream in now_inaccessible_streams],
        )
        send_event_on_commit(user_profile.realm, event, [user_profile.id])


@transaction.atomic(savepoint=False)
def do_change_user_role(
    user_profile: UserProfile, value: int, *, acting_user: UserProfile | None
) -> None:
    # We want to both (a) take a lock on the UserProfile row, and (b)
    # modify the passed-in UserProfile object, so that callers see the
    # changes in the object they hold. Unfortunately,
    # `select_for_update` cannot be combined with `refresh_from_db`
    # (https://code.djangoproject.com/ticket/28344). Call
    # `select_for_update` and throw away the result, so that we know
    # we have the lock on the row, then re-fill the `user_profile`
    # object with the values now that the lock exists.
    UserProfile.objects.select_for_update().get(id=user_profile.id)
    user_profile.refresh_from_db()

    old_value = user_profile.role
    if old_value == value:
        return
    old_system_group = get_system_user_group_for_user(user_profile)

    previously_accessible_streams = get_streams_for_user(
        user_profile,
        include_web_public=True,
        include_all_active=user_profile.is_realm_admin,
    )

    user_profile.role = value
    user_profile.save(update_fields=["role"])
    RealmAuditLog.objects.create(
        realm=user_profile.realm,
        modified_user=user_profile,
        acting_user=acting_user,
        event_type=AuditLogEventType.USER_ROLE_CHANGED,
        event_time=timezone_now(),
        extra_data={
            RealmAuditLog.OLD_VALUE: old_value,
            RealmAuditLog.NEW_VALUE: value,
            RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
        },
    )
    maybe_enqueue_audit_log_upload(user_profile.realm)
    if settings.BILLING_ENABLED and UserProfile.ROLE_GUEST in [old_value, value]:
        from corporate.lib.stripe import RealmBillingSession

        billing_session = RealmBillingSession(user=user_profile, realm=user_profile.realm)
        billing_session.update_license_ledger_if_needed(timezone_now())

    event = dict(
        type="realm_user", op="update", person=dict(user_id=user_profile.id, role=user_profile.role)
    )
    send_event_on_commit(user_profile.realm, event, get_user_ids_who_can_access_user(user_profile))

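    # A role change also moves the user between the corresponding system user
    # groups: swap the direct membership, record both sides in the audit log,
    # and notify clients of the membership changes.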
    UserGroupMembership.objects.filter(
        user_profile=user_profile, user_group=old_system_group
    ).delete()

    system_group = get_system_user_group_for_user(user_profile)
    now = timezone_now()
    UserGroupMembership.objects.create(user_profile=user_profile, user_group=system_group)
    RealmAuditLog.objects.bulk_create(
        [
            RealmAuditLog(
                realm=user_profile.realm,
                modified_user=user_profile,
                modified_user_group=old_system_group,
                event_type=AuditLogEventType.USER_GROUP_DIRECT_USER_MEMBERSHIP_REMOVED,
                event_time=now,
                acting_user=acting_user,
            ),
            RealmAuditLog(
                realm=user_profile.realm,
                modified_user=user_profile,
                modified_user_group=system_group,
                event_type=AuditLogEventType.USER_GROUP_DIRECT_USER_MEMBERSHIP_ADDED,
                event_time=now,
                acting_user=acting_user,
            ),
        ]
    )

    do_send_user_group_members_update_event("remove_members", old_system_group, [user_profile.id])

    do_send_user_group_members_update_event("add_members", system_group, [user_profile.id])

    if UserProfile.ROLE_MEMBER in [old_value, value]:
        update_users_in_full_members_system_group(
            user_profile.realm, [user_profile.id], acting_user=acting_user
        )

    send_stream_events_for_role_update(user_profile, previously_accessible_streams)


@transaction.atomic(savepoint=False)
def do_change_is_billing_admin(user_profile: UserProfile, value: bool) -> None:
    event_time = timezone_now()
    old_value = user_profile.is_billing_admin

    user_profile.is_billing_admin = value
    user_profile.save(update_fields=["is_billing_admin"])

    RealmAuditLog.objects.create(
        realm=user_profile.realm,
        event_type=AuditLogEventType.USER_SPECIAL_PERMISSION_CHANGED,
        event_time=event_time,
        acting_user=None,
        modified_user=user_profile,
        extra_data={
            RealmAuditLog.OLD_VALUE: old_value,
            RealmAuditLog.NEW_VALUE: value,
            "property": "is_billing_admin",
        },
    )

    event = dict(
        type="realm_user", op="update", person=dict(user_id=user_profile.id, is_billing_admin=value)
    )
    send_event_on_commit(user_profile.realm, event, get_user_ids_who_can_access_user(user_profile))


@transaction.atomic(savepoint=False)
def do_change_can_forge_sender(user_profile: UserProfile, value: bool) -> None:
    event_time = timezone_now()
    old_value = user_profile.can_forge_sender

    user_profile.can_forge_sender = value
    user_profile.save(update_fields=["can_forge_sender"])

    RealmAuditLog.objects.create(
        realm=user_profile.realm,
        event_type=AuditLogEventType.USER_SPECIAL_PERMISSION_CHANGED,
        event_time=event_time,
        acting_user=None,
        modified_user=user_profile,
        extra_data={
            RealmAuditLog.OLD_VALUE: old_value,
            RealmAuditLog.NEW_VALUE: value,
            "property": "can_forge_sender",
        },
    )


@transaction.atomic(savepoint=False)
def do_change_can_create_users(user_profile: UserProfile, value: bool) -> None:
    event_time = timezone_now()
    old_value = user_profile.can_create_users

    user_profile.can_create_users = value
    user_profile.save(update_fields=["can_create_users"])

    RealmAuditLog.objects.create(
        realm=user_profile.realm,
        event_type=AuditLogEventType.USER_SPECIAL_PERMISSION_CHANGED,
        event_time=event_time,
        acting_user=None,
        modified_user=user_profile,
        extra_data={
            RealmAuditLog.OLD_VALUE: old_value,
            RealmAuditLog.NEW_VALUE: value,
            "property": "can_create_users",
        },
    )


@transaction.atomic(savepoint=False)
def do_change_can_change_user_emails(user_profile: UserProfile, value: bool) -> None:
    event_time = timezone_now()
    old_value = user_profile.can_change_user_emails

    user_profile.can_change_user_emails = value
    user_profile.save(update_fields=["can_change_user_emails"])

    RealmAuditLog.objects.create(
        realm=user_profile.realm,
        event_type=AuditLogEventType.USER_SPECIAL_PERMISSION_CHANGED,
        event_time=event_time,
        acting_user=None,
        modified_user=user_profile,
        extra_data={
            RealmAuditLog.OLD_VALUE: old_value,
            RealmAuditLog.NEW_VALUE: value,
            "property": "can_change_user_emails",
        },
    )


@transaction.atomic(durable=True)
def do_update_outgoing_webhook_service(
    bot_profile: UserProfile, service_interface: int, service_payload_url: str
) -> None:
    # TODO: First service is chosen because currently one bot can only have one service.
    # Update this once multiple services are supported.
    service = get_bot_services(bot_profile.id)[0]
    service.base_url = service_payload_url
    service.interface = service_interface
    service.save()
    send_event_on_commit(
        bot_profile.realm,
        dict(
            type="realm_bot",
            op="update",
            bot=dict(
                user_id=bot_profile.id,
                services=[
                    dict(
                        base_url=service.base_url, interface=service.interface, token=service.token
                    )
                ],
            ),
        ),
        bot_owner_user_ids(bot_profile),
    )


@transaction.atomic(durable=True)
def do_update_bot_config_data(bot_profile: UserProfile, config_data: dict[str, str]) -> None:
    for key, value in config_data.items():
        set_bot_config(bot_profile, key, value)
    updated_config_data = get_bot_config(bot_profile)
    send_event_on_commit(
        bot_profile.realm,
        dict(
            type="realm_bot",
            op="update",
            bot=dict(
                user_id=bot_profile.id,
                services=[dict(config_data=updated_config_data)],
            ),
        ),
        bot_owner_user_ids(bot_profile),
    )


def get_service_dicts_for_bot(user_profile_id: int) -> list[dict[str, Any]]:
    user_profile = get_user_profile_by_id(user_profile_id)
    services = get_bot_services(user_profile_id)
    if user_profile.bot_type == UserProfile.OUTGOING_WEBHOOK_BOT:
        return [
            {
                "base_url": service.base_url,
                "interface": service.interface,
                "token": service.token,
            }
            for service in services
        ]
    elif user_profile.bot_type == UserProfile.EMBEDDED_BOT:
        try:
            return [
                {
                    "config_data": get_bot_config(user_profile),
                    "service_name": services[0].name,
                }
            ]
        # A ConfigError just means that there are no config entries for user_profile.
        except ConfigError:
            return []
    else:
        return []


def get_service_dicts_for_bots(
    bot_dicts: list[dict[str, Any]], realm: Realm
) -> dict[int, list[dict[str, Any]]]:
    bot_profile_ids = [bot_dict["id"] for bot_dict in bot_dicts]
    bot_services_by_uid: dict[int, list[Service]] = defaultdict(list)
    for service in Service.objects.filter(user_profile_id__in=bot_profile_ids):
        bot_services_by_uid[service.user_profile_id].append(service)

    embedded_bot_ids = [
        bot_dict["id"] for bot_dict in bot_dicts if bot_dict["bot_type"] == UserProfile.EMBEDDED_BOT
    ]
    embedded_bot_configs = get_bot_configs(embedded_bot_ids)

    service_dicts_by_uid: dict[int, list[dict[str, Any]]] = {}
    for bot_dict in bot_dicts:
        bot_profile_id = bot_dict["id"]
        bot_type = bot_dict["bot_type"]
        services = bot_services_by_uid[bot_profile_id]
        service_dicts: list[dict[str, Any]] = []
        if bot_type == UserProfile.OUTGOING_WEBHOOK_BOT:
            service_dicts = [
                {
                    "base_url": service.base_url,
                    "interface": service.interface,
                    "token": service.token,
                }
                for service in services
            ]
        elif bot_type == UserProfile.EMBEDDED_BOT and bot_profile_id in embedded_bot_configs:
            bot_config = embedded_bot_configs[bot_profile_id]
            service_dicts = [
                {
                    "config_data": bot_config,
                    "service_name": services[0].name,
                }
            ]
        service_dicts_by_uid[bot_profile_id] = service_dicts
    return service_dicts_by_uid


def get_owned_bot_dicts(
    user_profile: UserProfile, include_all_realm_bots_if_admin: bool = True
) -> list[dict[str, Any]]:
    if user_profile.is_realm_admin and include_all_realm_bots_if_admin:
        result = get_bot_dicts_in_realm(user_profile.realm)
    else:
        result = UserProfile.objects.filter(
            realm=user_profile.realm, is_bot=True, bot_owner=user_profile
        ).values(*bot_dict_fields)
    services_by_ids = get_service_dicts_for_bots(result, user_profile.realm)
    return [
        {
            "email": botdict["email"],
            "user_id": botdict["id"],
            "full_name": botdict["full_name"],
            "bot_type": botdict["bot_type"],
            "is_active": botdict["is_active"],
            "api_key": botdict["api_key"],
            "default_sending_stream": botdict["default_sending_stream__name"],
            "default_events_register_stream": botdict["default_events_register_stream__name"],
            "default_all_public_streams": botdict["default_all_public_streams"],
            "owner_id": botdict["bot_owner_id"],
            "avatar_url": get_avatar_field(
                user_id=botdict["id"],
                realm_id=botdict["realm_id"],
                email=botdict["email"],
                avatar_source=botdict["avatar_source"],
                avatar_version=botdict["avatar_version"],
                medium=False,
                client_gravatar=False,
            ),
            "services": services_by_ids[botdict["id"]],
        }
        for botdict in result
    ]


def generate_password_reset_url(
    user_profile: UserProfile, token_generator: PasswordResetTokenGenerator
) -> str:
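    # This mirrors Django's standard password reset scheme: a token from the
    # configured token generator plus the base64-encoded user ID, plugged into
    # the password_reset_confirm URL on the user's realm.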
    token = token_generator.make_token(user_profile)
    uid = urlsafe_base64_encode(str(user_profile.id).encode())
    endpoint = reverse("password_reset_confirm", kwargs=dict(uidb64=uid, token=token))
    return f"{user_profile.realm.url}{endpoint}"


def do_send_password_reset_email(
    email: str,
    realm: Realm,
    user_profile: UserProfile | None,
    *,
    token_generator: PasswordResetTokenGenerator = default_token_generator,
    request: HttpRequest | None = None,
) -> None:
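    # Three cases are handled via template context flags: a deactivated account
    # (notice only), an active account in this realm (reset link, after queueing
    # soft reactivation), and no account in this realm (the email instead lists
    # any active accounts the address has in other realms).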
    context: dict[str, object] = {
        "email": email,
        "realm_url": realm.url,
        "realm_name": realm.name,
    }
    if user_profile is not None and not user_profile.is_active:
        context["user_deactivated"] = True
        user_profile = None

    if user_profile is not None:
        queue_soft_reactivation(user_profile.id)
        context["active_account_in_realm"] = True
        context["reset_url"] = generate_password_reset_url(user_profile, token_generator)
        send_email(
            "zerver/emails/password_reset",
            to_user_ids=[user_profile.id],
            from_name=FromAddress.security_email_from_name(user_profile=user_profile),
            from_address=FromAddress.tokenized_no_reply_address(),
            context=context,
            realm=realm,
            request=request,
        )
    else:
        context["active_account_in_realm"] = False
        active_accounts_in_other_realms = UserProfile.objects.filter(
            delivery_email__iexact=email, is_active=True
        )
        if active_accounts_in_other_realms:
            context["active_accounts_in_other_realms"] = active_accounts_in_other_realms
        language = get_language()

        send_email(
            "zerver/emails/password_reset",
            to_emails=[email],
            from_name=FromAddress.security_email_from_name(language=language),
            from_address=FromAddress.tokenized_no_reply_address(),
            language=language,
            context=context,
            realm=realm,
            request=request,
        )