# zulip/zerver/actions/user_settings.py

import datetime
from typing import Iterable, Optional, Union

import orjson
from django.conf import settings
from django.db import transaction
from django.db.models import F
from django.utils.timezone import now as timezone_now

from confirmation.models import Confirmation, create_confirmation_link
from zerver.actions.presence import do_update_user_presence
from zerver.lib.avatar import avatar_url
from zerver.lib.cache import (
    cache_delete,
    delete_user_profile_caches,
    flush_user_profile,
    user_profile_by_api_key_cache_key,
)
from zerver.lib.create_user import get_display_email_address
from zerver.lib.i18n import get_language_name
from zerver.lib.queue import queue_json_publish
from zerver.lib.send_email import FromAddress, clear_scheduled_emails, send_email
from zerver.lib.timezone import canonicalize_timezone
from zerver.lib.upload import delete_avatar_image
from zerver.lib.users import (
    can_access_delivery_email,
    check_bot_name_available,
    check_full_name,
    get_users_with_access_to_real_email,
)
from zerver.lib.utils import generate_api_key
from zerver.models import (
    Draft,
    EmailChangeStatus,
    RealmAuditLog,
    ScheduledEmail,
    ScheduledMessageNotificationEmail,
    UserPresence,
    UserProfile,
    active_user_ids,
    bot_owner_user_ids,
    get_client,
    get_user_profile_by_id,
)
from zerver.tornado.django_api import send_event, send_event_on_commit


def send_user_email_update_event(user_profile: UserProfile) -> None:
    payload = dict(user_id=user_profile.id, new_email=user_profile.email)
    event = dict(type="realm_user", op="update", person=payload)
    send_event_on_commit(
        user_profile.realm,
        event,
        active_user_ids(user_profile.realm_id),
    )


def send_delivery_email_update_events(
    user_profile: UserProfile, old_visibility_setting: int, visibility_setting: int
) -> None:
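    """Given a change in the user's email_address_visibility setting,
    works out which active users gained or lost access to this user's
    real (delivery) email address, and sends each group a realm_user
    update event carrying the address or None, respectively."""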
    active_users = user_profile.realm.get_active_users()
    delivery_email_now_visible_user_ids = []
    delivery_email_now_invisible_user_ids = []

    for active_user in active_users:
        could_access_delivery_email_previously = can_access_delivery_email(
            active_user, user_profile.id, old_visibility_setting
        )
        can_access_delivery_email_now = can_access_delivery_email(
            active_user, user_profile.id, visibility_setting
        )

        if could_access_delivery_email_previously != can_access_delivery_email_now:
            if can_access_delivery_email_now:
                delivery_email_now_visible_user_ids.append(active_user.id)
            else:
                delivery_email_now_invisible_user_ids.append(active_user.id)

    if delivery_email_now_visible_user_ids:
        person = dict(user_id=user_profile.id, delivery_email=user_profile.delivery_email)
        event = dict(type="realm_user", op="update", person=person)
        send_event_on_commit(
            user_profile.realm,
            event,
            delivery_email_now_visible_user_ids,
        )

    if delivery_email_now_invisible_user_ids:
        person = dict(user_id=user_profile.id, delivery_email=None)
        event = dict(type="realm_user", op="update", person=person)
        send_event_on_commit(
            user_profile.realm,
            event,
            delivery_email_now_invisible_user_ids,
        )


@transaction.atomic(savepoint=False)
def do_change_user_delivery_email(user_profile: UserProfile, new_email: str) -> None:
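    """Changes the address Zulip actually delivers email to. If the
    user's email address is realm-public, the visible .email field is
    updated to match. Users with access to the real address are
    notified, a Gravatar change is propagated where relevant, and the
    change is audit-logged."""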
    delete_user_profile_caches([user_profile], user_profile.realm)

    user_profile.delivery_email = new_email
    if user_profile.email_address_is_realm_public():
        user_profile.email = new_email
        user_profile.save(update_fields=["email", "delivery_email"])
    else:
        user_profile.save(update_fields=["delivery_email"])

    # We notify all the users who have access to delivery email.
    payload = dict(user_id=user_profile.id, delivery_email=new_email)
    event = dict(type="realm_user", op="update", person=payload)
    delivery_email_visible_user_ids = get_users_with_access_to_real_email(user_profile)
    send_event_on_commit(user_profile.realm, event, delivery_email_visible_user_ids)

    if user_profile.avatar_source == UserProfile.AVATAR_FROM_GRAVATAR:
        # If the user is using Gravatar to manage their email address,
        # their Gravatar just changed, and we need to notify other
        # clients.
        notify_avatar_url_change(user_profile)

    if user_profile.email_address_is_realm_public():
        # Additionally, if we're also changing the publicly visible
        # email, we send a new_email event as well.
        send_user_email_update_event(user_profile)

    event_time = timezone_now()
    RealmAuditLog.objects.create(
        realm=user_profile.realm,
        acting_user=user_profile,
        modified_user=user_profile,
        event_type=RealmAuditLog.USER_EMAIL_CHANGED,
        event_time=event_time,
    )


def do_start_email_change_process(user_profile: UserProfile, new_email: str) -> None:
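    """Begins the email change flow: records an EmailChangeStatus row
    and emails a confirmation link to the new address. The address
    itself is changed only after the user confirms via that link."""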
    old_email = user_profile.delivery_email
    obj = EmailChangeStatus.objects.create(
        new_email=new_email,
        old_email=old_email,
        user_profile=user_profile,
        realm=user_profile.realm,
    )

    activation_url = create_confirmation_link(obj, Confirmation.EMAIL_CHANGE)
    from zerver.context_processors import common_context

    context = common_context(user_profile)
    context.update(
        old_email=old_email,
        new_email=new_email,
        activate_url=activation_url,
    )
    language = user_profile.default_language
    send_email(
        "zerver/emails/confirm_new_email",
        to_emails=[new_email],
        from_name=FromAddress.security_email_from_name(language=language),
        from_address=FromAddress.tokenized_no_reply_address(),
        language=language,
        context=context,
        realm=user_profile.realm,
    )


def do_change_password(user_profile: UserProfile, password: str, commit: bool = True) -> None:
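    """Sets the user's password, clears any rate limiting on
    authentication attempts for this account, and records an audit-log
    entry. Callers that will save the UserProfile themselves can pass
    commit=False to skip the immediate database write."""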
    user_profile.set_password(password)
    if commit:
        user_profile.save(update_fields=["password"])

    # Imported here to prevent import cycles
    from zproject.backends import RateLimitedAuthenticationByUsername

    RateLimitedAuthenticationByUsername(user_profile.delivery_email).clear_history()
    event_time = timezone_now()
    RealmAuditLog.objects.create(
        realm=user_profile.realm,
        acting_user=user_profile,
        modified_user=user_profile,
        event_type=RealmAuditLog.USER_PASSWORD_CHANGED,
        event_time=event_time,
    )


def do_change_full_name(
    user_profile: UserProfile, full_name: str, acting_user: Optional[UserProfile]
) -> None:
    old_name = user_profile.full_name
    user_profile.full_name = full_name
    user_profile.save(update_fields=["full_name"])
    event_time = timezone_now()
    RealmAuditLog.objects.create(
        realm=user_profile.realm,
        acting_user=acting_user,
        modified_user=user_profile,
        event_type=RealmAuditLog.USER_FULL_NAME_CHANGED,
        event_time=event_time,
migration: Add `extra_data_json` for audit log models. Note that we use the DjangoJSONEncoder so that we have builtin support for parsing Decimal and datetime. During this intermediate state, the migration that creates extra_data_json field has been run. We prepare for running the backfilling migration that populates extra_data_json from extra_data. This change implements double-write, which is important to keep the state of extra data consistent. For most extra_data usage, this is handled by the overriden `save` method on `AbstractRealmAuditLog`, where we either generates extra_data_json using orjson.loads or ast.literal_eval. While backfilling ensures that old realm audit log entries have extra_data_json populated, double-write ensures that any new entries generated will also have extra_data_json set. So that we can then safely rename extra_data_json to extra_data while ensuring the non-nullable invariant. For completeness, we additionally set RealmAuditLog.NEW_VALUE for the USER_FULL_NAME_CHANGED event. This cannot be handled with the overridden `save`. This addresses: https://github.com/zulip/zulip/pull/23116#discussion_r1040277795 Note that extra_data_json at this point is not used yet. So the test cases do not need to switch to testing extra_data_json. This is later done after we rename extra_data_json to extra_data. Double-write for the remote server audit logs is special, because we only get the dumped bytes from an external source. Luckily, none of the payload carries extra_data that is not generated using orjson.dumps for audit logs of event types in SYNC_BILLING_EVENTS. This can be verified by looking at: `git grep -A 6 -E "event_type=.*(USER_CREATED|USER_ACTIVATED|USER_DEACTIVATED|USER_REACTIVATED|USER_ROLE_CHANGED|REALM_DEACTIVATED|REALM_REACTIVATED)"` Therefore, we just need to populate extra_data_json doing an orjson.loads call after a None-check. Co-authored-by: Zixuan James Li <p359101898@gmail.com>
2023-06-07 21:14:43 +02:00
        extra_data_json={RealmAuditLog.OLD_VALUE: old_name, RealmAuditLog.NEW_VALUE: full_name},
    )
    payload = dict(user_id=user_profile.id, full_name=user_profile.full_name)
    send_event(
        user_profile.realm,
        dict(type="realm_user", op="update", person=payload),
        active_user_ids(user_profile.realm_id),
    )
    if user_profile.is_bot:
        send_event(
            user_profile.realm,
            dict(type="realm_bot", op="update", bot=payload),
            bot_owner_user_ids(user_profile),
        )


def check_change_full_name(
    user_profile: UserProfile, full_name_raw: str, acting_user: Optional[UserProfile]
) -> str:
    """Verifies that the user's proposed full name is valid. The caller
    is responsible for checking permissions. Returns the new full name,
    which may differ from what was passed in (because this function
    strips whitespace)."""
    new_full_name = check_full_name(full_name_raw)
    do_change_full_name(user_profile, new_full_name, acting_user)
    return new_full_name


def check_change_bot_full_name(
    user_profile: UserProfile, full_name_raw: str, acting_user: UserProfile
) -> None:
    new_full_name = check_full_name(full_name_raw)
    if new_full_name == user_profile.full_name:
        # Our web app will try to patch full_name even if the user didn't
        # modify the name in the form. We just silently ignore those
        # situations.
        return

    check_bot_name_available(
        realm_id=user_profile.realm_id,
        full_name=new_full_name,
    )

    do_change_full_name(user_profile, new_full_name, acting_user)


@transaction.atomic(durable=True)
def do_change_tos_version(user_profile: UserProfile, tos_version: Optional[str]) -> None:
    user_profile.tos_version = tos_version
    user_profile.save(update_fields=["tos_version"])
    event_time = timezone_now()
    RealmAuditLog.objects.create(
        realm=user_profile.realm,
        acting_user=user_profile,
        modified_user=user_profile,
        event_type=RealmAuditLog.USER_TERMS_OF_SERVICE_VERSION_CHANGED,
        event_time=event_time,
    )


def do_regenerate_api_key(user_profile: UserProfile, acting_user: UserProfile) -> str:
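    """Replaces the user's API key with a freshly generated one, flushing
    cache entries for the old key, audit-logging the change, notifying
    the owner if the user is a bot, and queuing a deferred job to clear
    push device tokens registered under the old key."""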
    old_api_key = user_profile.api_key
    new_api_key = generate_api_key()
    user_profile.api_key = new_api_key
    user_profile.save(update_fields=["api_key"])

    # We need to explicitly delete the old API key from our caches,
    # because the on-save handler for flushing the UserProfile object
    # in zerver/lib/cache.py only has access to the new API key.
    cache_delete(user_profile_by_api_key_cache_key(old_api_key))

    event_time = timezone_now()
    RealmAuditLog.objects.create(
        realm=user_profile.realm,
        acting_user=acting_user,
        modified_user=user_profile,
        event_type=RealmAuditLog.USER_API_KEY_CHANGED,
        event_time=event_time,
    )

    if user_profile.is_bot:
        send_event(
            user_profile.realm,
            dict(
                type="realm_bot",
                op="update",
                bot=dict(
                    user_id=user_profile.id,
                    api_key=new_api_key,
                ),
            ),
            bot_owner_user_ids(user_profile),
        )

    event = {"type": "clear_push_device_tokens", "user_profile_id": user_profile.id}
    queue_json_publish("deferred_work", event)
    return new_api_key


def bulk_regenerate_api_keys(user_profile_ids: Iterable[int]) -> None:
    for user_profile_id in user_profile_ids:
        user_profile = get_user_profile_by_id(user_profile_id)
        do_regenerate_api_key(user_profile, user_profile)


def notify_avatar_url_change(user_profile: UserProfile) -> None:
    if user_profile.is_bot:
        bot_event = dict(
            type="realm_bot",
            op="update",
            bot=dict(
                user_id=user_profile.id,
                avatar_url=avatar_url(user_profile),
            ),
        )
        send_event_on_commit(
            user_profile.realm,
            bot_event,
            bot_owner_user_ids(user_profile),
        )

    payload = dict(
        avatar_source=user_profile.avatar_source,
        avatar_url=avatar_url(user_profile),
        avatar_url_medium=avatar_url(user_profile, medium=True),
        avatar_version=user_profile.avatar_version,
        # Even clients using client_gravatar don't need the email,
        # since we're sending the URL anyway.
        user_id=user_profile.id,
    )

    event = dict(type="realm_user", op="update", person=payload)
    send_event_on_commit(
        user_profile.realm,
        event,
        active_user_ids(user_profile.realm_id),
    )


@transaction.atomic(savepoint=False)
def do_change_avatar_fields(
    user_profile: UserProfile,
    avatar_source: str,
    skip_notify: bool = False,
    *,
    acting_user: Optional[UserProfile],
) -> None:
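    """Updates the user's avatar source and bumps avatar_version so that
    clients know to refetch, writing an audit-log entry. Callers that
    need to finish related work first can pass skip_notify=True and call
    notify_avatar_url_change themselves."""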
    user_profile.avatar_source = avatar_source
    user_profile.avatar_version += 1
    user_profile.save(update_fields=["avatar_source", "avatar_version"])
    event_time = timezone_now()
    RealmAuditLog.objects.create(
        realm=user_profile.realm,
        modified_user=user_profile,
        event_type=RealmAuditLog.USER_AVATAR_SOURCE_CHANGED,
        extra_data=str({"avatar_source": avatar_source}),
        event_time=event_time,
        acting_user=acting_user,
    )

    if not skip_notify:
        notify_avatar_url_change(user_profile)


def do_delete_avatar_image(user: UserProfile, *, acting_user: Optional[UserProfile]) -> None:
    do_change_avatar_fields(user, UserProfile.AVATAR_FROM_GRAVATAR, acting_user=acting_user)
    delete_avatar_image(user)


def update_scheduled_email_notifications_time(
    user_profile: UserProfile, old_batching_period: int, new_batching_period: int
) -> None:
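    """Shifts every already-scheduled message notification email for this
    user by (new_batching_period - old_batching_period) seconds, so that
    pending emails honor the new batching period. For example, moving
    from a 120s to a 300s period pushes each pending email 180s later."""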
    existing_scheduled_emails = ScheduledMessageNotificationEmail.objects.filter(
        user_profile=user_profile
    )

    scheduled_timestamp_change = datetime.timedelta(
        seconds=new_batching_period
    ) - datetime.timedelta(seconds=old_batching_period)

    existing_scheduled_emails.update(
        scheduled_timestamp=F("scheduled_timestamp") + scheduled_timestamp_change
    )


@transaction.atomic(durable=True)
def do_change_user_setting(
    user_profile: UserProfile,
    setting_name: str,
    setting_value: Union[bool, str, int],
    *,
    acting_user: Optional[UserProfile],
) -> None:
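    """Core function for changing one of a user's personal settings:
    validates the value's type against UserProfile.property_types, saves
    it, writes a USER_SETTING_CHANGED audit-log entry, and notifies the
    user's clients (including legacy event formats for clients predating
    the user_settings event type, added at feature level 89). Several
    settings also trigger follow-up work, handled below."""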
    old_value = getattr(user_profile, setting_name)
    event_time = timezone_now()
    if setting_name == "timezone":
        assert isinstance(setting_value, str)
        setting_value = canonicalize_timezone(setting_value)
    else:
        property_type = UserProfile.property_types[setting_name]
        assert isinstance(setting_value, property_type)
    setattr(user_profile, setting_name, setting_value)
    user_profile.save(update_fields=[setting_name])

    RealmAuditLog.objects.create(
        realm=user_profile.realm,
        event_type=RealmAuditLog.USER_SETTING_CHANGED,
        event_time=event_time,
        acting_user=acting_user,
        modified_user=user_profile,
        extra_data=orjson.dumps(
            {
                RealmAuditLog.OLD_VALUE: old_value,
                RealmAuditLog.NEW_VALUE: setting_value,
                "property": setting_name,
            }
        ).decode(),
    )

    # Disabling digest emails should clear a user's email queue
    if setting_name == "enable_digest_emails" and not setting_value:
        clear_scheduled_emails(user_profile.id, ScheduledEmail.DIGEST)

    if setting_name == "email_notifications_batching_period_seconds":
        assert isinstance(old_value, int)
        assert isinstance(setting_value, int)
        update_scheduled_email_notifications_time(user_profile, old_value, setting_value)

    event = {
        "type": "user_settings",
        "op": "update",
        "property": setting_name,
        "value": setting_value,
    }
    if setting_name == "default_language":
        assert isinstance(setting_value, str)
        event["language_name"] = get_language_name(setting_value)

    send_event_on_commit(user_profile.realm, event, [user_profile.id])

    if setting_name in UserProfile.notification_settings_legacy:
        # This legacy event format is for backwards-compatibility with
        # clients that don't support the new user_settings event type.
        # We only send this for settings added before Feature level 89.
        legacy_event = {
            "type": "update_global_notifications",
            "user": user_profile.email,
            "notification_name": setting_name,
            "setting": setting_value,
        }
        send_event_on_commit(user_profile.realm, legacy_event, [user_profile.id])

    if setting_name in UserProfile.display_settings_legacy or setting_name == "timezone":
        # This legacy event format is for backwards-compatibility with
        # clients that don't support the new user_settings event type.
        # We only send this for settings added before Feature level 89.
        legacy_event = {
            "type": "update_display_settings",
            "user": user_profile.email,
            "setting_name": setting_name,
            "setting": setting_value,
        }
        if setting_name == "default_language":
            assert isinstance(setting_value, str)
            legacy_event["language_name"] = get_language_name(setting_value)

        send_event_on_commit(user_profile.realm, legacy_event, [user_profile.id])

    # Updates to the time zone display setting are sent to all users
    if setting_name == "timezone":
        payload = dict(
            email=user_profile.email,
            user_id=user_profile.id,
            timezone=canonicalize_timezone(user_profile.timezone),
        )
        timezone_event = dict(type="realm_user", op="update", person=payload)
        send_event_on_commit(
            user_profile.realm,
            timezone_event,
            active_user_ids(user_profile.realm_id),
        )

    if setting_name == "email_address_visibility":
        send_delivery_email_update_events(
            user_profile, old_value, user_profile.email_address_visibility
        )

        if UserProfile.EMAIL_ADDRESS_VISIBILITY_EVERYONE not in [old_value, setting_value]:
            # We use real email addresses on UserProfile.email only if
            # EMAIL_ADDRESS_VISIBILITY_EVERYONE is configured, so changes
            # between other values never require updating that field; we
            # can save work and return here.
            return

        user_profile.email = get_display_email_address(user_profile)
        user_profile.save(update_fields=["email"])

        transaction.on_commit(lambda: flush_user_profile(sender=UserProfile, instance=user_profile))

        send_user_email_update_event(user_profile)
        notify_avatar_url_change(user_profile)

if setting_name == "enable_drafts_synchronization" and setting_value is False:
# Delete all of the drafts from the backend but don't send delete events
# for them since all that's happened is that we stopped syncing changes,
# not deleted every previously synced draft - to do that use the DELETE
# endpoint.
Draft.objects.filter(user_profile=user_profile).delete()
if setting_name == "presence_enabled":
# The presence_enabled setting's primary function is to stop
# doing presence updates for the user altogether.
#
# When a user toggles the presence_enabled setting, we
# immediately trigger a presence update, so all users see the
# user's current presence state as consistent with the new
# setting; not doing so can make it look like the settings
# change didn't have any effect.
if setting_value:
            status = UserPresence.LEGACY_STATUS_ACTIVE_INT
            presence_time = timezone_now()
        else:
            # HACK: Remove existing presence data for the current user
            # when disabling presence. This hack will go away when we
            # replace our presence data structure with a simpler model
            # that doesn't separate individual clients.
            UserPresence.objects.filter(user_profile_id=user_profile.id).delete()

            # We create a single presence entry for the user, old
            # enough to be guaranteed to be treated as offline by
            # correct clients, such that the user will, for as long as
            # presence remains disabled, appear to have been last
            # online a few minutes before they disabled presence.
            #
            # We add a small additional offset as a fudge factor in
            # case of clock skew.
            status = UserPresence.LEGACY_STATUS_IDLE_INT
            presence_time = timezone_now() - datetime.timedelta(
                seconds=settings.OFFLINE_THRESHOLD_SECS + 120
            )

        do_update_user_presence(
            user_profile,
            get_client("website"),
            presence_time,
            status,
            force_send_update=True,
        )