import logging
import zoneinfo
from email.headerregistry import Address
from typing import Any, Literal

from django.conf import settings
from django.db import transaction
from django.utils.timezone import get_current_timezone_name as timezone_get_current_timezone_name
from django.utils.timezone import now as timezone_now
from django.utils.translation import gettext as _

from confirmation.models import Confirmation, create_confirmation_link, generate_key
from zerver.actions.custom_profile_fields import do_remove_realm_custom_profile_fields
from zerver.actions.message_delete import do_delete_messages_by_sender
from zerver.actions.user_groups import update_users_in_full_members_system_group
from zerver.actions.user_settings import do_delete_avatar_image
from zerver.lib.exceptions import JsonableError
from zerver.lib.message import parse_message_time_limit_setting, update_first_visible_message_id
from zerver.lib.retention import move_messages_to_archive
from zerver.lib.send_email import FromAddress, send_email, send_email_to_admins
from zerver.lib.sessions import delete_realm_user_sessions
from zerver.lib.timestamp import datetime_to_timestamp, timestamp_to_datetime
from zerver.lib.timezone import canonicalize_timezone
from zerver.lib.types import AnonymousSettingGroupDict
from zerver.lib.upload import delete_message_attachments
from zerver.lib.user_counts import realm_user_count_by_role
from zerver.lib.user_groups import (
    get_group_setting_value_for_api,
    get_group_setting_value_for_audit_log_data,
)
from zerver.lib.utils import optional_bytes_to_mib
from zerver.models import (
    ArchivedAttachment,
    Attachment,
    Message,
    NamedUserGroup,
    Realm,
    RealmAuditLog,
    RealmAuthenticationMethod,
    RealmReactivationStatus,
    RealmUserDefault,
    Recipient,
    ScheduledEmail,
    Stream,
    Subscription,
    UserGroup,
    UserProfile,
)
from zerver.models.groups import SystemGroups
from zerver.models.realm_audit_logs import AuditLogEventType
from zerver.models.realms import get_default_max_invites_for_realm_plan_type, get_realm
from zerver.models.users import active_user_ids
from zerver.tornado.django_api import send_event_on_commit

@transaction.atomic(savepoint=False)
def do_set_realm_property(
    realm: Realm, name: str, value: Any, *, acting_user: UserProfile | None
) -> None:
    """Takes in a realm object, the name of an attribute to update, the
    value to update it to, and the user who initiated the update.
    """
    property_type = Realm.property_types[name]
    assert isinstance(
        value, property_type
    ), f"Cannot update {name}: {value} is not an instance of {property_type}"

    old_value = getattr(realm, name)
    if old_value == value:
        return

    setattr(realm, name, value)
    realm.save(update_fields=[name])

    event = dict(
        type="realm",
        op="update",
        property=name,
        value=value,
    )

    # These settings have a different event format due to their history.
    message_edit_settings = [
        "allow_message_editing",
        "message_content_edit_limit_seconds",
    ]
    if name in message_edit_settings:
        event = dict(
            type="realm",
            op="update_dict",
            property="default",
            data={name: value},
        )

    send_event_on_commit(realm, event, active_user_ids(realm.id))

    event_time = timezone_now()
    RealmAuditLog.objects.create(
        realm=realm,
        event_type=AuditLogEventType.REALM_PROPERTY_CHANGED,
        event_time=event_time,
        acting_user=acting_user,
        extra_data={
            RealmAuditLog.OLD_VALUE: old_value,
            RealmAuditLog.NEW_VALUE: value,
            "property": name,
        },
    )

    if name == "waiting_period_threshold":
        update_users_in_full_members_system_group(realm, acting_user=acting_user)
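

# Illustrative usage sketch (hypothetical helper, for documentation only; it is
# not called anywhere in this module and assumes an existing `realm` and an
# administrator `UserProfile` bound to `admin`):
def _example_set_waiting_period(realm: Realm, admin: UserProfile) -> None:
    # Validates the value against Realm.property_types, saves the field, emits
    # a `realm op=update` event to active clients, and records a
    # REALM_PROPERTY_CHANGED audit log row attributed to `admin`.
    do_set_realm_property(realm, "waiting_period_threshold", 10, acting_user=admin)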


@transaction.atomic(durable=True)
def do_set_push_notifications_enabled_end_timestamp(
    realm: Realm, value: int | None, *, acting_user: UserProfile | None
) -> None:
    # Variant of do_set_realm_property with a bit of extra complexity
    # for the fact that we store a datetime object in the database but
    # use an integer format timestamp in the API.
    name = "push_notifications_enabled_end_timestamp"
    old_timestamp = None
    old_datetime = getattr(realm, name)
    if old_datetime is not None:
        old_timestamp = datetime_to_timestamp(old_datetime)

    if old_timestamp == value:
        return

    new_datetime = None
    if value is not None:
        new_datetime = timestamp_to_datetime(value)
    setattr(realm, name, new_datetime)
    realm.save(update_fields=[name])

    event_time = timezone_now()
    RealmAuditLog.objects.create(
        realm=realm,
        event_type=AuditLogEventType.REALM_PROPERTY_CHANGED,
        event_time=event_time,
        acting_user=acting_user,
        extra_data={
            RealmAuditLog.OLD_VALUE: old_timestamp,
            RealmAuditLog.NEW_VALUE: value,
            "property": name,
        },
    )

    event = dict(
        type="realm",
        op="update",
        property=name,
        value=value,
    )
    send_event_on_commit(realm, event, active_user_ids(realm.id))
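

# Illustrative usage sketch (hypothetical helper, for documentation only): the
# API layer works with integer Unix timestamps while the model stores a
# datetime, and the conversion round-trips through zerver.lib.timestamp.
def _example_extend_push_notifications_window(
    realm: Realm, until: int, admin: UserProfile
) -> None:
    # `until` is a Unix timestamp in seconds, e.g. datetime_to_timestamp(dt).
    do_set_push_notifications_enabled_end_timestamp(realm, until, acting_user=admin)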


@transaction.atomic(savepoint=False)
def do_change_realm_permission_group_setting(
    realm: Realm,
    setting_name: str,
    user_group: UserGroup,
    old_setting_api_value: int | AnonymousSettingGroupDict | None = None,
    *,
    acting_user: UserProfile | None,
) -> None:
    """Takes in a realm object, the name of an attribute to update, the
    user_group to update it to, and the user who initiated the update.
    """
    assert setting_name in Realm.REALM_PERMISSION_GROUP_SETTINGS
    old_value = getattr(realm, setting_name)

    setattr(realm, setting_name, user_group)
    realm.save(update_fields=[setting_name])

    if old_setting_api_value is None:
        # Most production callers will have computed this as part of
        # verifying whether there's an actual change to make, but it
        # feels quite clumsy to have to pass it from unit tests, so we
        # compute it here if not provided by the caller.
        old_setting_api_value = get_group_setting_value_for_api(old_value)
    new_setting_api_value = get_group_setting_value_for_api(user_group)

    if not hasattr(old_value, "named_user_group") and hasattr(user_group, "named_user_group"):
        # We delete the UserGroup which the setting was set to
        # previously if it does not have any linked NamedUserGroup
        # object, as it is not used anywhere else. A new UserGroup
        # object would be created if the setting is later set to
        # a combination of users and groups.
        old_value.delete()

    event = dict(
        type="realm",
        op="update_dict",
        property="default",
        data={setting_name: new_setting_api_value},
    )

    send_event_on_commit(realm, event, active_user_ids(realm.id))

    event_time = timezone_now()
    RealmAuditLog.objects.create(
        realm=realm,
        event_type=AuditLogEventType.REALM_PROPERTY_CHANGED,
        event_time=event_time,
        acting_user=acting_user,
        extra_data={
            RealmAuditLog.OLD_VALUE: get_group_setting_value_for_audit_log_data(
                old_setting_api_value
            ),
            RealmAuditLog.NEW_VALUE: get_group_setting_value_for_audit_log_data(
                new_setting_api_value
            ),
            "property": setting_name,
        },
    )
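

# Illustrative usage sketch (hypothetical helper, for documentation only):
# settings listed in Realm.REALM_PERMISSION_GROUP_SETTINGS are assigned a
# UserGroup rather than a scalar value; here we assume the standard
# SystemGroups.ADMINISTRATORS system group exists for the realm.
def _example_restrict_setting_to_admins(
    realm: Realm, setting_name: str, admin: UserProfile
) -> None:
    admins_group = NamedUserGroup.objects.get(
        name=SystemGroups.ADMINISTRATORS, realm=realm, is_system_group=True
    )
    do_change_realm_permission_group_setting(
        realm, setting_name, admins_group, acting_user=admin
    )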


def parse_and_set_setting_value_if_required(
    realm: Realm, setting_name: str, value: int | str, *, acting_user: UserProfile | None
) -> tuple[int | None, bool]:
    parsed_value = parse_message_time_limit_setting(
        value,
        Realm.MESSAGE_TIME_LIMIT_SETTING_SPECIAL_VALUES_MAP,
        setting_name=setting_name,
    )

    setting_value_changed = False
    if parsed_value is None and getattr(realm, setting_name) is not None:
        # We handle "None" here separately, since in the update_realm view
        # function, do_set_realm_property is called only if setting value is
        # not "None". For values other than "None", the view function itself
        # sets the value by calling "do_set_realm_property".
        do_set_realm_property(
            realm,
            setting_name,
            parsed_value,
            acting_user=acting_user,
        )
        setting_value_changed = True

    return parsed_value, setting_value_changed
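

# Illustrative usage sketch (hypothetical helper, for documentation only;
# assumes the special-values map translates a sentinel string such as
# "unlimited" into None for these message time limit settings):
def _example_clear_message_edit_limit(realm: Realm, admin: UserProfile) -> None:
    parsed_value, changed = parse_and_set_setting_value_if_required(
        realm, "message_content_edit_limit_seconds", "unlimited", acting_user=admin
    )
    # `parsed_value` is None here, and `changed` reports whether the realm
    # previously had a finite limit that was just cleared.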


def get_realm_authentication_methods_for_page_params_api(
    realm: Realm, authentication_methods: dict[str, bool]
) -> dict[str, Any]:
    # To avoid additional queries, this expects passing in the authentication_methods
    # dictionary directly, which is useful when the caller already has to fetch it
    # for other purposes - and that's the circumstance in which this function is
    # currently used. We can trivially make this argument optional if needed.

    from zproject.backends import AUTH_BACKEND_NAME_MAP

    result_dict: dict[str, dict[str, str | bool]] = {
        backend_name: {"enabled": enabled, "available": True}
        for backend_name, enabled in authentication_methods.items()
    }

    if not settings.BILLING_ENABLED:
        return result_dict

    # The rest of the function is only for the mechanism of restricting
    # certain backends based on the realm's plan type on Zulip Cloud.

    from corporate.models import CustomerPlan

    for backend_name in result_dict:
        available_for = AUTH_BACKEND_NAME_MAP[backend_name].available_for_cloud_plans

        if available_for is not None and realm.plan_type not in available_for:
            result_dict[backend_name]["available"] = False

            required_upgrade_plan_number = min(
                set(available_for).intersection({Realm.PLAN_TYPE_STANDARD, Realm.PLAN_TYPE_PLUS})
            )
            if required_upgrade_plan_number == Realm.PLAN_TYPE_STANDARD:
                required_upgrade_plan_name = CustomerPlan.name_from_tier(
                    CustomerPlan.TIER_CLOUD_STANDARD
                )
            else:
                assert required_upgrade_plan_number == Realm.PLAN_TYPE_PLUS
                required_upgrade_plan_name = CustomerPlan.name_from_tier(
                    CustomerPlan.TIER_CLOUD_PLUS
                )

            result_dict[backend_name]["unavailable_reason"] = _(
                "You need to upgrade to the {required_upgrade_plan_name} plan to use this authentication method."
            ).format(required_upgrade_plan_name=required_upgrade_plan_name)
        else:
            result_dict[backend_name]["available"] = True

    return result_dict
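

# Illustrative sketch of the returned shape (backend names and values here are
# hypothetical examples, keyed like AUTH_BACKEND_NAME_MAP):
#
#     {
#         "Email": {"enabled": True, "available": True},
#         "SAML": {
#             "enabled": False,
#             "available": False,
#             "unavailable_reason": "You need to upgrade to the ... plan ...",
#         },
#     }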


def validate_authentication_methods_dict_from_api(
    realm: Realm, authentication_methods: dict[str, bool]
) -> None:
    current_authentication_methods = realm.authentication_methods_dict()
    for name in authentication_methods:
        if name not in current_authentication_methods:
            raise JsonableError(
                _("Invalid authentication method: {name}. Valid methods are: {methods}").format(
                    name=name, methods=sorted(current_authentication_methods.keys())
                )
            )

    if settings.BILLING_ENABLED:
        validate_plan_for_authentication_methods(realm, authentication_methods)


def validate_plan_for_authentication_methods(
    realm: Realm, authentication_methods: dict[str, bool]
) -> None:
    from zproject.backends import AUTH_BACKEND_NAME_MAP

    old_authentication_methods = realm.authentication_methods_dict()
    newly_enabled_authentication_methods = {
        name
        for name, enabled in authentication_methods.items()
        if enabled and not old_authentication_methods.get(name, False)
    }
    for name in newly_enabled_authentication_methods:
        available_for = AUTH_BACKEND_NAME_MAP[name].available_for_cloud_plans
        if available_for is not None and realm.plan_type not in available_for:
            # This should only be feasible via the API, since app UI should prevent
            # trying to enable an unavailable authentication method.
            raise JsonableError(
                _("Authentication method {name} is not available on your current plan.").format(
                    name=name
                )
            )


@transaction.atomic(savepoint=False)
def do_set_realm_authentication_methods(
    realm: Realm, authentication_methods: dict[str, bool], *, acting_user: UserProfile | None
) -> None:
    old_value = realm.authentication_methods_dict()
    for key, value in authentication_methods.items():
        # This does queries in a loop, but this isn't a performance sensitive
        # path and is only run rarely.
        if value:
            RealmAuthenticationMethod.objects.get_or_create(realm=realm, name=key)
        else:
            RealmAuthenticationMethod.objects.filter(realm=realm, name=key).delete()

    updated_value = realm.authentication_methods_dict()
    RealmAuditLog.objects.create(
        realm=realm,
        event_type=AuditLogEventType.REALM_PROPERTY_CHANGED,
        event_time=timezone_now(),
        acting_user=acting_user,
        extra_data={
            RealmAuditLog.OLD_VALUE: old_value,
            RealmAuditLog.NEW_VALUE: updated_value,
            "property": "authentication_methods",
        },
    )

    event_data = dict(
        authentication_methods=get_realm_authentication_methods_for_page_params_api(
            realm, updated_value
        )
    )
    event = dict(
        type="realm",
        op="update_dict",
        property="default",
        data=event_data,
    )
    send_event_on_commit(realm, event, active_user_ids(realm.id))
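

# Illustrative usage sketch (hypothetical helper, for documentation only; the
# dictionary keys are backend display names from AUTH_BACKEND_NAME_MAP, and
# "Email" is assumed to be one of them):
def _example_disable_password_auth(realm: Realm, admin: UserProfile) -> None:
    methods = realm.authentication_methods_dict()
    methods["Email"] = False
    # Reject unknown names and plan-restricted backends before applying.
    validate_authentication_methods_dict_from_api(realm, methods)
    do_set_realm_authentication_methods(realm, methods, acting_user=admin)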


def do_set_realm_stream(
    realm: Realm,
    field: Literal[
        "new_stream_announcements_stream",
        "signup_announcements_stream",
        "zulip_update_announcements_stream",
    ],
    stream: Stream | None,
    stream_id: int,
    *,
    acting_user: UserProfile | None,
) -> None:
    # We could calculate more of these variables from `field`, but
    # it's probably more readable to not do so.
    if field == "new_stream_announcements_stream":
        old_value = realm.new_stream_announcements_stream_id
        realm.new_stream_announcements_stream = stream
        property = "new_stream_announcements_stream_id"
    elif field == "signup_announcements_stream":
        old_value = realm.signup_announcements_stream_id
        realm.signup_announcements_stream = stream
        property = "signup_announcements_stream_id"
    elif field == "zulip_update_announcements_stream":
        old_value = realm.zulip_update_announcements_stream_id
        realm.zulip_update_announcements_stream = stream
        property = "zulip_update_announcements_stream_id"
    else:
        raise AssertionError("Invalid realm stream field.")

    with transaction.atomic(durable=True):
        realm.save(update_fields=[field])

        event_time = timezone_now()
        RealmAuditLog.objects.create(
            realm=realm,
            event_type=AuditLogEventType.REALM_PROPERTY_CHANGED,
            event_time=event_time,
            acting_user=acting_user,
            extra_data={
                RealmAuditLog.OLD_VALUE: old_value,
                RealmAuditLog.NEW_VALUE: stream_id,
                "property": field,
            },
        )

        event = dict(
            type="realm",
            op="update",
            property=property,
            value=stream_id,
        )
        send_event_on_commit(realm, event, active_user_ids(realm.id))


def do_set_realm_new_stream_announcements_stream(
    realm: Realm, stream: Stream | None, stream_id: int, *, acting_user: UserProfile | None
) -> None:
    do_set_realm_stream(
        realm, "new_stream_announcements_stream", stream, stream_id, acting_user=acting_user
    )


def do_set_realm_signup_announcements_stream(
    realm: Realm, stream: Stream | None, stream_id: int, *, acting_user: UserProfile | None
) -> None:
    do_set_realm_stream(
        realm, "signup_announcements_stream", stream, stream_id, acting_user=acting_user
    )


def do_set_realm_zulip_update_announcements_stream(
    realm: Realm, stream: Stream | None, stream_id: int, *, acting_user: UserProfile | None
) -> None:
    do_set_realm_stream(
        realm, "zulip_update_announcements_stream", stream, stream_id, acting_user=acting_user
    )


@transaction.atomic(durable=True)
def do_set_realm_user_default_setting(
    realm_user_default: RealmUserDefault,
    name: str,
    value: Any,
    *,
    acting_user: UserProfile | None,
) -> None:
    old_value = getattr(realm_user_default, name)
    realm = realm_user_default.realm
    event_time = timezone_now()

    setattr(realm_user_default, name, value)
    realm_user_default.save(update_fields=[name])

    RealmAuditLog.objects.create(
        realm=realm,
        event_type=AuditLogEventType.REALM_DEFAULT_USER_SETTINGS_CHANGED,
        event_time=event_time,
        acting_user=acting_user,
        extra_data={
            RealmAuditLog.OLD_VALUE: old_value,
            RealmAuditLog.NEW_VALUE: value,
            "property": name,
        },
    )

    event = dict(
        type="realm_user_settings_defaults",
        op="update",
        property=name,
        value=value,
    )
    send_event_on_commit(realm, event, active_user_ids(realm.id))
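

# Illustrative usage sketch (hypothetical helper, for documentation only):
# realm-level defaults for newly created users live on the realm's
# RealmUserDefault row, keyed by the same property names as per-user settings;
# "twenty_four_hour_time" is assumed to be one such mirrored property.
def _example_default_new_users_to_24_hour_time(realm: Realm, admin: UserProfile) -> None:
    realm_user_default = RealmUserDefault.objects.get(realm=realm)
    do_set_realm_user_default_setting(
        realm_user_default, "twenty_four_hour_time", True, acting_user=admin
    )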


RealmDeactivationReasonType = Literal[
    "owner_request",
    "tos_violation",
    "inactivity",
    "self_hosting_migration",
    # When we change the subdomain of a realm, we leave
    # behind a deactivated gravestone realm.
    "subdomain_change",
]


def do_deactivate_realm(
    realm: Realm,
    *,
    acting_user: UserProfile | None,
    deactivation_reason: RealmDeactivationReasonType,
    email_owners: bool,
) -> None:
    """
    Deactivate this realm. Do NOT deactivate the users -- we need to be able to
    tell the difference between users that were intentionally deactivated,
    e.g. by a realm admin, and users who can't currently use Zulip because their
    realm has been deactivated.
    """
    if realm.deactivated:
        return

    if settings.BILLING_ENABLED:
        from corporate.lib.stripe import RealmBillingSession

    with transaction.atomic(durable=True):
        realm.deactivated = True
        realm.save(update_fields=["deactivated"])

        if settings.BILLING_ENABLED:
            billing_session = RealmBillingSession(user=acting_user, realm=realm)
            billing_session.downgrade_now_without_creating_additional_invoices()

        event_time = timezone_now()
        RealmAuditLog.objects.create(
            realm=realm,
            event_type=AuditLogEventType.REALM_DEACTIVATED,
            event_time=event_time,
            acting_user=acting_user,
            extra_data={
                RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(realm),
                "deactivation_reason": deactivation_reason,
            },
        )

        from zerver.lib.remote_server import maybe_enqueue_audit_log_upload

        maybe_enqueue_audit_log_upload(realm)

        ScheduledEmail.objects.filter(realm=realm).delete()

        # This event will only ever be received by clients with an active
        # longpoll connection, because by this point clients will be
        # unable to authenticate again to their event queue (triggering an
        # immediate reload into the page explaining the realm was
        # deactivated). So the purpose of sending this is to flush all
        # active longpoll connections for the realm.
        event = dict(type="realm", op="deactivated", realm_id=realm.id)
        send_event_on_commit(realm, event, active_user_ids(realm.id))

    # Don't deactivate the users, as that would lose a lot of state if
    # the realm needs to be reactivated, but do delete their sessions
    # so they get bumped to the login screen, where they'll get a
    # realm deactivation notice when they try to log in.
    #
    # Note: This is intentionally outside the transaction because it
    # is unsafe to modify sessions inside transactions with the
    # cached_db session plugin we're using, and our session engine
    # declared in zerver/lib/safe_session_cached_db.py enforces this.
    delete_realm_user_sessions(realm)

    # Flag to send deactivated realm email to organization owners; is false
    # for realm exports and realm subdomain changes so that those actions
    # do not email active organization owners.
    if email_owners:
        do_send_realm_deactivation_email(realm, acting_user)
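

# Illustrative usage sketch (hypothetical helper, for documentation only): a
# management-command style call site. Passing acting_user=None records the
# audit log entry without attributing it to a specific user, and
# email_owners=True notifies the organization owners about the deactivation.
def _example_deactivate_for_inactivity(realm: Realm) -> None:
    do_deactivate_realm(
        realm,
        acting_user=None,
        deactivation_reason="inactivity",
        email_owners=True,
    )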


def do_reactivate_realm(realm: Realm) -> None:
    if not realm.deactivated:
        logging.warning("Realm %s cannot be reactivated because it is already active.", realm.id)
        return

    realm.deactivated = False
    with transaction.atomic(durable=True):
        realm.save(update_fields=["deactivated"])

        event_time = timezone_now()
        RealmAuditLog.objects.create(
            # We hardcode acting_user=None, since realm reactivation
            # uses an email authentication mechanism that will never
            # know which user initiated the change.
            acting_user=None,
            realm=realm,
            event_type=AuditLogEventType.REALM_REACTIVATED,
            event_time=event_time,
            extra_data={
                RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(realm),
            },
        )

        from zerver.lib.remote_server import maybe_enqueue_audit_log_upload

        maybe_enqueue_audit_log_upload(realm)


def do_add_deactivated_redirect(realm: Realm, redirect_url: str) -> None:
    realm.deactivated_redirect = redirect_url
    realm.save(update_fields=["deactivated_redirect"])


def do_delete_all_realm_attachments(realm: Realm, *, batch_size: int = 1000) -> None:
    # Delete attachment files from the storage backend, so that we
    # don't leave them dangling.
    for obj_class in Attachment, ArchivedAttachment:
        last_id = 0
        while True:
            to_delete = (
                obj_class._default_manager.filter(realm_id=realm.id, pk__gt=last_id)
                .order_by("pk")
                .values_list("pk", "path_id")[:batch_size]
            )
            if len(to_delete) > 0:
                delete_message_attachments([row[1] for row in to_delete])
                last_id = to_delete[len(to_delete) - 1][0]
            if len(to_delete) < batch_size:
                break
        obj_class._default_manager.filter(realm=realm).delete()


def do_scrub_realm(realm: Realm, *, acting_user: UserProfile | None) -> None:
    if settings.BILLING_ENABLED:
        from corporate.lib.stripe import RealmBillingSession

        billing_session = RealmBillingSession(user=acting_user, realm=realm)
        billing_session.downgrade_now_without_creating_additional_invoices()

    users = UserProfile.objects.filter(realm=realm)
    for user in users:
        do_delete_messages_by_sender(user)
        do_delete_avatar_image(user, acting_user=acting_user)
        user.full_name = f"Scrubbed {generate_key()[:15]}"
        scrubbed_email = Address(
            username=f"scrubbed-{generate_key()[:15]}", domain=realm.host
        ).addr_spec
        user.email = scrubbed_email
        user.delivery_email = scrubbed_email
        user.save(update_fields=["full_name", "email", "delivery_email"])

    internal_realm = get_realm(settings.SYSTEM_BOT_REALM)
    # We could more simply obtain the Message list by just doing
    # Message.objects.filter(sender__realm=internal_realm, realm=realm), but it's
    # more secure against bugs that may cause Message.realm to be incorrect for some
    # cross-realm messages to also determine the actual Recipients - to prevent
    # deletion of excessive messages.
    all_recipient_ids_in_realm = [
        *Stream.objects.filter(realm=realm).values_list("recipient_id", flat=True),
        *UserProfile.objects.filter(realm=realm).values_list("recipient_id", flat=True),
        *Subscription.objects.filter(
            recipient__type=Recipient.DIRECT_MESSAGE_GROUP, user_profile__realm=realm
        ).values_list("recipient_id", flat=True),
    ]
    cross_realm_bot_message_ids = list(
        Message.objects.filter(
            # Filtering by both message.recipient and message.realm is
            # more robust for ensuring no messages belonging to
            # another realm will be deleted due to some bugs.
            #
            # Uses index: zerver_message_realm_sender_recipient
            sender__realm=internal_realm,
            recipient_id__in=all_recipient_ids_in_realm,
            realm=realm,
        ).values_list("id", flat=True)
    )
    move_messages_to_archive(cross_realm_bot_message_ids)

    do_remove_realm_custom_profile_fields(realm)
    do_delete_all_realm_attachments(realm)

    RealmAuditLog.objects.create(
        realm=realm,
        event_time=timezone_now(),
        acting_user=acting_user,
        event_type=AuditLogEventType.REALM_SCRUBBED,
    )


@transaction.atomic(durable=True)
def do_change_realm_org_type(
    realm: Realm,
    org_type: int,
    acting_user: UserProfile | None,
) -> None:
    old_value = realm.org_type
    realm.org_type = org_type
    realm.save(update_fields=["org_type"])

    RealmAuditLog.objects.create(
        event_type=AuditLogEventType.REALM_ORG_TYPE_CHANGED,
        realm=realm,
        event_time=timezone_now(),
        acting_user=acting_user,
        extra_data={"old_value": old_value, "new_value": org_type},
    )

    event = dict(type="realm", op="update", property="org_type", value=org_type)
    send_event_on_commit(realm, event, active_user_ids(realm.id))


@transaction.atomic(durable=True)
def do_change_realm_max_invites(realm: Realm, max_invites: int, acting_user: UserProfile) -> None:
    old_value = realm.max_invites
    if max_invites == 0:
        # Reset to default maximum for plan type
        new_max = get_default_max_invites_for_realm_plan_type(realm.plan_type)
    else:
        new_max = max_invites
    realm.max_invites = new_max  # type: ignore[assignment] # https://github.com/python/mypy/issues/3004
    realm.save(update_fields=["_max_invites"])

    RealmAuditLog.objects.create(
        event_type=AuditLogEventType.REALM_PROPERTY_CHANGED,
        realm=realm,
        event_time=timezone_now(),
        acting_user=acting_user,
        extra_data={
            "old_value": old_value,
            "new_value": new_max,
            "property": "max_invites",
        },
    )
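

# Illustrative usage sketch (hypothetical helper, for documentation only):
# passing 0 clears any explicit override and falls back to the default maximum
# for the realm's current plan type.
def _example_reset_invite_limit(realm: Realm, support_admin: UserProfile) -> None:
    do_change_realm_max_invites(realm, 0, support_admin)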


@transaction.atomic(savepoint=False)
def do_change_realm_plan_type(
    realm: Realm, plan_type: int, *, acting_user: UserProfile | None
) -> None:
    from zproject.backends import AUTH_BACKEND_NAME_MAP

    old_value = realm.plan_type
    if plan_type not in Realm.ALL_PLAN_TYPES:
        raise AssertionError("Invalid plan type")

    if plan_type == Realm.PLAN_TYPE_LIMITED:
        # We do not allow public access on limited plans.
        do_set_realm_property(realm, "enable_spectator_access", False, acting_user=acting_user)

    if old_value in [Realm.PLAN_TYPE_PLUS, Realm.PLAN_TYPE_SELF_HOSTED] and plan_type not in [
        Realm.PLAN_TYPE_PLUS,
        Realm.PLAN_TYPE_SELF_HOSTED,
    ]:
        # If downgrading to a plan that no longer has access to change
        # can_access_all_users_group, set it back to the default
        # value.
        everyone_system_group = NamedUserGroup.objects.get(
            name=SystemGroups.EVERYONE, realm=realm, is_system_group=True
        )
        if realm.can_access_all_users_group_id != everyone_system_group.id:
            do_change_realm_permission_group_setting(
                realm, "can_access_all_users_group", everyone_system_group, acting_user=acting_user
            )

    # If downgrading, disable authentication methods that are not available on the new plan.
    if settings.BILLING_ENABLED:
        realm_authentication_methods = realm.authentication_methods_dict()
        for backend_name, enabled in realm_authentication_methods.items():
            if enabled and plan_type < old_value:
                available_for = AUTH_BACKEND_NAME_MAP[backend_name].available_for_cloud_plans
                if available_for is not None and plan_type not in available_for:
                    realm_authentication_methods[backend_name] = False
        if realm_authentication_methods != realm.authentication_methods_dict():
            do_set_realm_authentication_methods(
                realm, realm_authentication_methods, acting_user=acting_user
            )

    realm.plan_type = plan_type
    realm.save(update_fields=["plan_type"])
    RealmAuditLog.objects.create(
        event_type=AuditLogEventType.REALM_PLAN_TYPE_CHANGED,
        realm=realm,
        event_time=timezone_now(),
        acting_user=acting_user,
        extra_data={"old_value": old_value, "new_value": plan_type},
    )

    realm.max_invites = get_default_max_invites_for_realm_plan_type(plan_type)  # type: ignore[assignment] # https://github.com/python/mypy/issues/3004
    if plan_type == Realm.PLAN_TYPE_LIMITED:
        realm.message_visibility_limit = Realm.MESSAGE_VISIBILITY_LIMITED
    else:
        realm.message_visibility_limit = None

    update_first_visible_message_id(realm)

    realm.save(update_fields=["_max_invites", "message_visibility_limit"])

    event = dict(
        type="realm",
        op="update_dict",
        property="default",
        data={
            "plan_type": plan_type,
            "upload_quota_mib": optional_bytes_to_mib(realm.upload_quota_bytes()),
            "max_file_upload_size_mib": realm.get_max_file_upload_size_mebibytes(),
        },
    )
    send_event_on_commit(realm, event, active_user_ids(realm.id))
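

# Illustrative usage sketch (hypothetical helper, for documentation only):
# downgrading to the limited plan also disables spectator access, resets
# can_access_all_users_group if needed, turns off authentication methods the
# new plan does not include, and recomputes invite and upload limits.
def _example_downgrade_to_limited(realm: Realm, support_admin: UserProfile) -> None:
    do_change_realm_plan_type(realm, Realm.PLAN_TYPE_LIMITED, acting_user=support_admin)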


def do_send_realm_reactivation_email(realm: Realm, *, acting_user: UserProfile | None) -> None:
    obj = RealmReactivationStatus.objects.create(realm=realm)

    url = create_confirmation_link(obj, Confirmation.REALM_REACTIVATION)
    RealmAuditLog.objects.create(
        realm=realm,
        acting_user=acting_user,
        event_type=AuditLogEventType.REALM_REACTIVATION_EMAIL_SENT,
        event_time=timezone_now(),
    )
    context = {
        "confirmation_url": url,
        "realm_url": realm.url,
        "realm_name": realm.name,
        "corporate_enabled": settings.CORPORATE_ENABLED,
    }
    language = realm.default_language
    send_email_to_admins(
        "zerver/emails/realm_reactivation",
        realm,
        from_address=FromAddress.tokenized_no_reply_address(),
        from_name=FromAddress.security_email_from_name(language=language),
        language=language,
        context=context,
    )


def do_send_realm_deactivation_email(realm: Realm, acting_user: UserProfile | None) -> None:
    shared_context: dict[str, Any] = {
        "realm_name": realm.name,
    }
    deactivation_time = timezone_now()
    owners = set(realm.get_human_owner_users())
    anonymous_deactivation = False

    # The realm was deactivated via the deactivate_realm management command.
    if acting_user is None:
        anonymous_deactivation = True

    # This realm was deactivated from the support panel; we do not share the
    # deactivating user's information in this case.
    if acting_user is not None and acting_user not in owners:
        anonymous_deactivation = True

    for owner in owners:
        owner_tz = owner.timezone
        if owner_tz == "":
            owner_tz = timezone_get_current_timezone_name()
        local_date = deactivation_time.astimezone(
            zoneinfo.ZoneInfo(canonicalize_timezone(owner_tz))
        ).date()

        if anonymous_deactivation:
            context = dict(
                acting_user=False,
                initiated_deactivation=False,
                event_date=local_date,
                **shared_context,
            )
        else:
            assert acting_user is not None
            if owner == acting_user:
                context = dict(
                    acting_user=True,
                    initiated_deactivation=True,
                    event_date=local_date,
                    **shared_context,
                )
            else:
                context = dict(
                    acting_user=True,
                    initiated_deactivation=False,
                    deactivating_owner=acting_user.full_name,
                    event_date=local_date,
                    **shared_context,
                )

        send_email(
            "zerver/emails/realm_deactivated",
            to_emails=[owner.delivery_email],
            from_name=FromAddress.security_email_from_name(language=owner.default_language),
            from_address=FromAddress.SUPPORT,
            language=owner.default_language,
            context=context,
            realm=realm,
        )