2022-04-14 23:51:16 +02:00
|
|
|
import hashlib
|
|
|
|
from collections import defaultdict
|
|
|
|
from dataclasses import dataclass
|
2022-09-19 21:48:53 +02:00
|
|
|
from typing import Any, Collection, Dict, Iterable, List, Mapping, Optional, Set, Tuple
|
2022-04-14 23:51:16 +02:00
|
|
|
|
|
|
|
import orjson
|
|
|
|
from django.conf import settings
|
|
|
|
from django.db import transaction
|
2021-11-01 23:55:07 +01:00
|
|
|
from django.db.models import Q, QuerySet
|
2022-04-14 23:51:16 +02:00
|
|
|
from django.utils.timezone import now as timezone_now
|
|
|
|
from django.utils.translation import gettext as _
|
|
|
|
from django.utils.translation import override as override_language
|
2022-09-19 21:48:53 +02:00
|
|
|
from django_stubs_ext import ValuesQuerySet
|
2022-04-14 23:51:16 +02:00
|
|
|
|
|
|
|
from zerver.actions.default_streams import (
|
|
|
|
do_remove_default_stream,
|
|
|
|
do_remove_streams_from_default_stream_group,
|
|
|
|
)
|
|
|
|
from zerver.actions.message_send import internal_send_stream_message
|
|
|
|
from zerver.lib.cache import (
|
|
|
|
cache_delete_many,
|
|
|
|
cache_set,
|
|
|
|
display_recipient_cache_key,
|
|
|
|
to_dict_cache_key_id,
|
|
|
|
)
|
|
|
|
from zerver.lib.email_mirror_helpers import encode_email_address
|
2023-05-12 21:19:13 +02:00
|
|
|
from zerver.lib.exceptions import JsonableError
|
2022-04-14 23:51:16 +02:00
|
|
|
from zerver.lib.mention import silent_mention_syntax_for_user
|
|
|
|
from zerver.lib.message import get_last_message_id
|
|
|
|
from zerver.lib.queue import queue_json_publish
|
|
|
|
from zerver.lib.stream_color import pick_colors
|
|
|
|
from zerver.lib.stream_subscription import (
|
|
|
|
SubInfo,
|
|
|
|
bulk_get_private_peers,
|
|
|
|
bulk_get_subscriber_peer_info,
|
|
|
|
get_active_subscriptions_for_stream_id,
|
|
|
|
get_bulk_stream_subscriber_info,
|
|
|
|
get_used_colors_for_user_ids,
|
|
|
|
)
|
2023-07-27 16:42:21 +02:00
|
|
|
from zerver.lib.stream_traffic import get_streams_traffic
|
2022-04-14 23:51:16 +02:00
|
|
|
from zerver.lib.streams import (
|
|
|
|
can_access_stream_user_ids,
|
|
|
|
get_occupied_streams,
|
|
|
|
get_stream_permission_policy_name,
|
|
|
|
render_stream_description,
|
|
|
|
send_stream_creation_event,
|
2023-07-26 12:31:22 +02:00
|
|
|
stream_to_dict,
|
2022-04-14 23:51:16 +02:00
|
|
|
)
|
|
|
|
from zerver.lib.subscription_info import get_subscribers_query
|
2022-05-26 02:51:35 +02:00
|
|
|
from zerver.lib.types import APISubscriptionDict
|
2022-04-14 23:51:16 +02:00
|
|
|
from zerver.models import (
|
|
|
|
ArchivedAttachment,
|
|
|
|
Attachment,
|
|
|
|
DefaultStream,
|
|
|
|
DefaultStreamGroup,
|
|
|
|
Message,
|
|
|
|
Realm,
|
|
|
|
RealmAuditLog,
|
|
|
|
Recipient,
|
|
|
|
Stream,
|
|
|
|
Subscription,
|
2022-06-27 18:39:33 +02:00
|
|
|
UserGroup,
|
2022-04-14 23:51:16 +02:00
|
|
|
UserProfile,
|
|
|
|
active_non_guest_user_ids,
|
|
|
|
get_system_bot,
|
|
|
|
)
|
django_api: Extract send_event_on_commit helper.
django-stubs 4.2.1 gives transaction.on_commit a more accurate type
annotation, but this exposed that mypy can’t handle the lambda default
parameters that we use to recapture loop variables such as
for stream_id in public_stream_ids:
peer_user_ids = …
event = …
transaction.on_commit(
lambda event=event, peer_user_ids=peer_user_ids: send_event(
realm, event, peer_user_ids
)
)
https://github.com/python/mypy/issues/15459
A workaround that mypy accepts is
transaction.on_commit(
(
lambda event, peer_user_ids: lambda: send_event(
realm, event, peer_user_ids
)
)(event, peer_user_ids)
)
But that’s kind of ugly and potentially error-prone, so let’s make a
helper function for this very common pattern.
send_event_on_commit(realm, event, peer_user_ids)
Signed-off-by: Anders Kaseorg <anders@zulip.com>
2023-06-17 20:53:07 +02:00
|
|
|
from zerver.tornado.django_api import send_event, send_event_on_commit
|
2022-04-14 23:51:16 +02:00
|
|
|
|
|
|
|
|
|
|
|
@transaction.atomic(savepoint=False)
def do_deactivate_stream(stream: Stream, *, acting_user: Optional[UserProfile]) -> None:
    """Deactivate (archive) a stream.

    Marks the stream deactivated and invite-only, renames it (freeing the
    original name for reuse), deactivates all its subscriptions, removes it
    from default streams/groups, notifies clients with a stream "delete"
    event, and records a RealmAuditLog entry.

    Raises JsonableError if the stream is already deactivated.
    """
    # If the stream is already deactivated, this is a no-op
    if stream.deactivated is True:
        raise JsonableError(_("Stream is already deactivated"))

    # We want to mark all messages in the to-be-deactivated stream as
    # read for all users; otherwise they will pollute queries like
    # "Get the user's first unread message".  Since this can be an
    # expensive operation, we do it via the deferred_work queue
    # processor.
    deferred_work_event = {
        "type": "mark_stream_messages_as_read_for_everyone",
        "stream_recipient_id": stream.recipient_id,
    }
    transaction.on_commit(lambda: queue_json_publish("deferred_work", deferred_work_event))

    # Get the affected user ids *before* we deactivate everybody.
    affected_user_ids = can_access_stream_user_ids(stream)

    get_active_subscriptions_for_stream_id(stream.id, include_deactivated_users=True).update(
        active=False
    )

    was_invite_only = stream.invite_only
    stream.deactivated = True
    stream.invite_only = True

    # Preserve as much as possible the original stream name while giving it a
    # special prefix that both indicates that the stream is deactivated and
    # frees up the original name for reuse.
    old_name = stream.name

    # Prepend a short (7-character) hash of the stream ID to the new
    # stream name, so that multiple deactivated streams whose names
    # would otherwise collide after truncation remain distinct.
    hashed_stream_id = hashlib.sha512(str(stream.id).encode()).hexdigest()[0:7]

    new_name = (hashed_stream_id + "!DEACTIVATED:" + old_name)[: Stream.MAX_NAME_LENGTH]

    stream.name = new_name
    stream.save(update_fields=["name", "deactivated", "invite_only"])

    # If this is a default stream, remove it, properly sending a
    # notification to browser clients.
    if DefaultStream.objects.filter(realm_id=stream.realm_id, stream_id=stream.id).exists():
        do_remove_default_stream(stream)

    default_stream_groups_for_stream = DefaultStreamGroup.objects.filter(streams__id=stream.id)
    for group in default_stream_groups_for_stream:
        do_remove_streams_from_default_stream_group(stream.realm, group, [stream])

    # Clients are sent the stream's pre-deactivation name/privacy, since
    # that is what they currently know the stream as.
    stream_dict = stream_to_dict(stream)
    stream_dict.update(dict(name=old_name, invite_only=was_invite_only))
    event = dict(type="stream", op="delete", streams=[stream_dict])
    send_event_on_commit(stream.realm, event, affected_user_ids)

    event_time = timezone_now()
    RealmAuditLog.objects.create(
        realm=stream.realm,
        acting_user=acting_user,
        modified_stream=stream,
        event_type=RealmAuditLog.STREAM_DEACTIVATED,
        event_time=event_time,
    )
|
|
|
|
|
|
|
|
|
2021-11-01 23:55:07 +01:00
|
|
|
def deactivated_streams_by_old_name(realm: Realm, stream_name: str) -> QuerySet[Stream]:
    """Find deactivated streams in `realm` whose pre-deactivation name was
    `stream_name`, under either renaming scheme do_deactivate_stream has used.
    """
    # Modern scheme (post-1b6f68bb59dc): a 7-character hash prefix, then
    # "!DEACTIVATED:", then as much of the original name as still fits
    # within MAX_NAME_LENGTH.
    fixed_length_prefix = ".......!DEACTIVATED:"
    truncated_name = stream_name[0 : Stream.MAX_NAME_LENGTH - len(fixed_length_prefix)]

    # Historical scheme (pre-1b6f68bb59dc): one or more "!" characters,
    # then "DEACTIVATED:", then the original name, truncated to
    # MAX_NAME_LENGTH.  We enumerate up to 20 bangs.
    old_names = [
        ("!" * bang_length + "DEACTIVATED:" + stream_name)[0 : Stream.MAX_NAME_LENGTH]
        for bang_length in range(1, 21)
    ]

    # NOTE(review): truncated_name is interpolated into the regex without
    # escaping, so names containing regex metacharacters may over-match —
    # confirm that is acceptable for this lookup.
    return Stream.objects.filter(realm=realm, deactivated=True).filter(
        Q(name__regex=rf"^{fixed_length_prefix}{truncated_name}")
        | Q(name__in=old_names),
    )
|
|
|
|
|
|
|
|
|
|
|
|
@transaction.atomic(savepoint=False)
def do_reactivate_stream(
    stream: Stream, new_name: str, *, acting_user: Optional[UserProfile]
) -> None:
    """Reactivate (un-archive) a deactivated stream under `new_name`.

    The stream comes back as a private (invite-only) stream with shared
    history; realm owners are subscribed to it, realm admins are notified
    of its existence, and a notification message is sent to the stream.

    Raises JsonableError if the stream is not deactivated, or if `new_name`
    is already taken in the realm.
    """
    realm = stream.realm
    if not stream.deactivated:
        raise JsonableError(_("Stream is not currently deactivated"))
    if Stream.objects.filter(realm=realm, name=new_name).exists():
        raise JsonableError(
            _("Stream named {stream_name} already exists").format(stream_name=new_name)
        )
    assert stream.recipient_id is not None

    stream.deactivated = False
    stream.name = new_name

    # We only set invite_only=True during deactivation, which can lead
    # to the invalid state of being invite-only but also web-public.
    # Explicitly reset the access; we do not use
    # do_change_stream_permission because no users need be notified,
    # and it cannot handle the broken state that may currently exist.
    stream.is_web_public = False
    stream.invite_only = True
    stream.history_public_to_subscribers = True
    stream.save(
        update_fields=[
            "name",
            "deactivated",
            "is_web_public",
            "invite_only",
            "history_public_to_subscribers",
        ]
    )

    # Update caches: the display-recipient cache holds the stream name,
    # and each message's to_dict cache entry embeds it too, so flush both.
    cache_set(display_recipient_cache_key(stream.recipient_id), new_name)
    messages = Message.objects.filter(recipient_id=stream.recipient_id).only("id")
    cache_delete_many(to_dict_cache_key_id(message.id) for message in messages)

    # Unset the is_web_public cache on attachments, since the stream is now private.
    Attachment.objects.filter(messages__recipient_id=stream.recipient_id).update(is_web_public=None)
    ArchivedAttachment.objects.filter(messages__recipient_id=stream.recipient_id).update(
        is_web_public=None
    )

    RealmAuditLog.objects.create(
        realm=realm,
        acting_user=acting_user,
        modified_stream=stream,
        event_type=RealmAuditLog.STREAM_REACTIVATED,
        event_time=timezone_now(),
    )

    recent_traffic = None
    if not realm.is_zephyr_mirror_realm:
        # We do not need stream traffic data for streams in zephyr mirroring realm.
        recent_traffic = get_streams_traffic({stream.id})

    # All admins always get to know about private streams' existence,
    # but we only subscribe the realm owners.
    send_stream_creation_event(
        realm, stream, [user.id for user in realm.get_admin_users_and_bots()], recent_traffic
    )
    bulk_add_subscriptions(
        realm=realm,
        streams=[stream],
        users=realm.get_human_owner_users(),
        acting_user=acting_user,
    )

    # Announce the un-archival in the stream itself, in the realm's
    # default language.
    sender = get_system_bot(settings.NOTIFICATION_BOT, stream.realm_id)
    with override_language(stream.realm.default_language):
        internal_send_stream_message(
            sender,
            stream,
            str(Realm.STREAM_EVENTS_NOTIFICATION_TOPIC),
            _("Stream {stream_name} un-archived.").format(stream_name=new_name),
        )
|
|
|
|
|
|
|
|
|
2022-07-11 08:47:11 +02:00
|
|
|
def bulk_delete_cache_keys(message_ids_to_clear: List[int]) -> None:
    """Delete the to_dict cache entries for the given message IDs.

    Deletes in batches of 5000 to bound the size of any single
    cache_delete_many call.
    """
    # Step through the list by index rather than repeatedly re-slicing
    # the tail (the old approach copied the remaining list on every
    # iteration, which is quadratic for large inputs).
    batch_size = 5000
    for start in range(0, len(message_ids_to_clear), batch_size):
        batch = message_ids_to_clear[start : start + batch_size]
        keys_to_delete = [to_dict_cache_key_id(message_id) for message_id in batch]
        cache_delete_many(keys_to_delete)
|
|
|
|
|
|
|
|
|
|
|
|
def merge_streams(
    realm: Realm, stream_to_keep: Stream, stream_to_destroy: Stream
) -> Tuple[int, int, int]:
    """Merge stream_to_destroy into stream_to_keep, then deactivate it.

    Returns a tuple of (number of users newly subscribed to
    stream_to_keep, number of messages moved, number of subscriptions
    removed from stream_to_destroy).  Returns (0, 0, 0) if both streams
    share a recipient (i.e. are the same stream).
    """
    recipient_to_destroy = stream_to_destroy.recipient
    recipient_to_keep = stream_to_keep.recipient
    assert recipient_to_keep is not None
    assert recipient_to_destroy is not None
    if recipient_to_destroy.id == recipient_to_keep.id:
        return (0, 0, 0)

    # The high-level approach here is to move all the messages to
    # the surviving stream, deactivate all the subscriptions on
    # the stream to be removed and deactivate the stream, and add
    # new subscriptions to the stream to keep for any users who
    # were only on the now-deactivated stream.
    #
    # The order of operations is carefully chosen so that calling this
    # function again is likely to be an effective way to recover if
    # this process is interrupted by an error.

    # Move the Subscription objects.  This algorithm doesn't
    # preserve any stream settings/colors/etc. from the stream
    # being destroyed, but it's convenient.
    existing_subs = Subscription.objects.filter(recipient=recipient_to_keep)
    users_already_subscribed = {sub.user_profile_id: sub.active for sub in existing_subs}

    subs_to_deactivate = Subscription.objects.filter(recipient=recipient_to_destroy, active=True)
    # Users with no subscription (or an inactive one) on the surviving
    # stream need a fresh/reactivated subscription there.
    users_to_activate = [
        sub.user_profile
        for sub in subs_to_deactivate
        if not users_already_subscribed.get(sub.user_profile_id, False)
    ]

    if len(users_to_activate) > 0:
        bulk_add_subscriptions(realm, [stream_to_keep], users_to_activate, acting_user=None)

    # Move the messages, and delete the old copies from caches. We do
    # this before removing the subscription objects, to avoid messages
    # "disappearing" if an error interrupts this function.
    message_ids_to_clear = list(
        Message.objects.filter(recipient=recipient_to_destroy).values_list("id", flat=True)
    )
    count = Message.objects.filter(recipient=recipient_to_destroy).update(
        recipient=recipient_to_keep
    )
    bulk_delete_cache_keys(message_ids_to_clear)

    # Remove subscriptions to the old stream.
    if len(subs_to_deactivate) > 0:
        bulk_remove_subscriptions(
            realm,
            [sub.user_profile for sub in subs_to_deactivate],
            [stream_to_destroy],
            acting_user=None,
        )

    do_deactivate_stream(stream_to_destroy, acting_user=None)

    return (len(users_to_activate), count, len(subs_to_deactivate))
|
|
|
|
|
|
|
|
|
2022-06-23 18:20:29 +02:00
|
|
|
def get_subscriber_ids(
    stream: Stream, requesting_user: Optional[UserProfile] = None
) -> ValuesQuerySet[Subscription, int]:
    """Return a queryset of user_profile_ids subscribed to `stream`.

    Delegates access checking to get_subscribers_query, scoped to
    `requesting_user` when one is provided.
    """
    return get_subscribers_query(stream, requesting_user).values_list(
        "user_profile_id", flat=True
    )
|
|
|
|
|
|
|
|
|
|
|
|
@dataclass
class StreamInfo:
    """Per-stream data shared across users when building subscription
    "add" events (see send_subscription_add_events)."""

    # Incoming email address for the stream, from encode_email_address
    # with show_sender=True.
    email_address: str
    # User IDs subscribed to the stream; left empty for public streams
    # in zephyr-mirroring realms.
    subscribers: List[int]
|
|
|
|
|
|
|
|
|
|
|
|
def send_subscription_add_events(
    realm: Realm,
    sub_info_list: List[SubInfo],
    subscriber_dict: Dict[int, Set[int]],
) -> None:
    """Send each newly subscribed user a "subscription add" event
    describing all of their new subscriptions.

    `subscriber_dict` maps stream id -> ids of that stream's subscribers,
    used to populate the `subscribers` field of each subscription dict.
    """
    # Group the SubInfos by user, so we can send one event per user.
    info_by_user: Dict[int, List[SubInfo]] = defaultdict(list)
    for sub_info in sub_info_list:
        info_by_user[sub_info.user.id].append(sub_info)

    stream_ids = {sub_info.stream.id for sub_info in sub_info_list}
    recent_traffic = get_streams_traffic(stream_ids=stream_ids)

    # We generally only have a few streams, so we compute stream
    # data in its own loop.
    stream_info_dict: Dict[int, StreamInfo] = {}
    for sub_info in sub_info_list:
        stream = sub_info.stream
        if stream.id not in stream_info_dict:
            email_address = encode_email_address(stream, show_sender=True)
            if stream.is_in_zephyr_realm and not stream.invite_only:
                subscribers = []
            else:
                subscribers = list(subscriber_dict[stream.id])
            stream_info_dict[stream.id] = StreamInfo(
                email_address=email_address,
                subscribers=subscribers,
            )

    for user_id, sub_infos in info_by_user.items():
        sub_dicts: List[APISubscriptionDict] = []
        for sub_info in sub_infos:
            stream = sub_info.stream
            stream_info = stream_info_dict[stream.id]
            subscription = sub_info.sub
            stream_dict = stream_to_dict(stream, recent_traffic)
            # This is verbose as we cannot unpack existing TypedDict
            # to initialize another TypedDict while making mypy happy.
            # https://github.com/python/mypy/issues/5382
            sub_dict = APISubscriptionDict(
                # Fields from Subscription.API_FIELDS
                audible_notifications=subscription.audible_notifications,
                color=subscription.color,
                desktop_notifications=subscription.desktop_notifications,
                email_notifications=subscription.email_notifications,
                is_muted=subscription.is_muted,
                pin_to_top=subscription.pin_to_top,
                push_notifications=subscription.push_notifications,
                wildcard_mentions_notify=subscription.wildcard_mentions_notify,
                # Computed fields not present in Subscription.API_FIELDS
                email_address=stream_info.email_address,
                in_home_view=not subscription.is_muted,
                stream_weekly_traffic=stream_dict["stream_weekly_traffic"],
                subscribers=stream_info.subscribers,
                # Fields from Stream.API_FIELDS
                can_remove_subscribers_group=stream_dict["can_remove_subscribers_group"],
                date_created=stream_dict["date_created"],
                description=stream_dict["description"],
                first_message_id=stream_dict["first_message_id"],
                history_public_to_subscribers=stream_dict["history_public_to_subscribers"],
                invite_only=stream_dict["invite_only"],
                is_web_public=stream_dict["is_web_public"],
                message_retention_days=stream_dict["message_retention_days"],
                name=stream_dict["name"],
                rendered_description=stream_dict["rendered_description"],
                stream_id=stream_dict["stream_id"],
                stream_post_policy=stream_dict["stream_post_policy"],
                # Computed fields not present in Stream.API_FIELDS
                is_announcement_only=stream_dict["is_announcement_only"],
            )

            sub_dicts.append(sub_dict)

        # Send a notification to the user who subscribed.
        event = dict(type="subscription", op="add", subscriptions=sub_dicts)
        send_event(realm, event, [user_id])
|
2022-04-14 23:51:16 +02:00
|
|
|
|
|
|
|
|
|
|
|
# This function contains all the database changes as part of
|
|
|
|
# subscribing users to streams; we use a transaction to ensure that
|
|
|
|
# the RealmAuditLog entries are created atomically with the
|
|
|
|
# Subscription object creation (and updates).
|
|
|
|
@transaction.atomic(savepoint=False)
def bulk_add_subs_to_db_with_logging(
    realm: Realm,
    acting_user: Optional[UserProfile],
    subs_to_add: List[SubInfo],
    subs_to_activate: List[SubInfo],
) -> None:
    """Create/reactivate Subscription rows and write matching audit logs.

    `subs_to_add` are brand-new Subscription objects to insert;
    `subs_to_activate` are existing (inactive) subscriptions to flip back
    to active.  One RealmAuditLog row is recorded per subscription, with
    event_type SUBSCRIPTION_CREATED or SUBSCRIPTION_ACTIVATED
    respectively.
    """
    Subscription.objects.bulk_create(info.sub for info in subs_to_add)
    sub_ids = [info.sub.id for info in subs_to_activate]
    Subscription.objects.filter(id__in=sub_ids).update(active=True)

    # Log subscription activities in RealmAuditLog.  The two input lists
    # produce identical log rows apart from event_type, so build them in
    # one pass instead of two copy-pasted loops.
    event_time = timezone_now()
    event_last_message_id = get_last_message_id()

    all_subscription_logs: List[RealmAuditLog] = [
        RealmAuditLog(
            realm=realm,
            acting_user=acting_user,
            modified_user=sub_info.user,
            modified_stream=sub_info.stream,
            event_last_message_id=event_last_message_id,
            event_type=event_type,
            event_time=event_time,
        )
        for event_type, sub_infos in (
            (RealmAuditLog.SUBSCRIPTION_CREATED, subs_to_add),
            (RealmAuditLog.SUBSCRIPTION_ACTIVATED, subs_to_activate),
        )
        for sub_info in sub_infos
    ]
    # Now since we have all log objects generated we can do a bulk insert
    RealmAuditLog.objects.bulk_create(all_subscription_logs)
|
|
|
|
|
|
|
|
|
2023-06-30 13:14:49 +02:00
|
|
|
def send_stream_creation_events_for_previously_inaccessible_streams(
    realm: Realm,
    stream_dict: Dict[int, Stream],
    altered_user_dict: Dict[int, Set[int]],
    altered_guests: Set[int],
) -> None:
    """Send stream "create" events to users who could not previously see
    the streams they are being subscribed to.

    `altered_user_dict` maps stream id -> ids of users (un)subscribed to
    that stream; `altered_guests` is the set of affected guest user ids.
    """
    recent_traffic = None
    if not realm.is_zephyr_mirror_realm:
        # We do not need stream traffic data for streams in zephyr mirroring realm.
        stream_ids = set(altered_user_dict.keys())
        recent_traffic = get_streams_traffic(stream_ids)

    for stream_id, stream_users_ids in altered_user_dict.items():
        stream = stream_dict[stream_id]

        notify_user_ids: List[int] = []
        if not stream.is_public():
            # Users newly added to invite-only streams need a `create`
            # notification, both because they need the stream to exist
            # before they get the "subscribe" notification, and so they
            # can manage the new stream.
            # Realm admins already have all created private streams.
            realm_admin_ids = {user.id for user in realm.get_admin_users_and_bots()}
            notify_user_ids = list(stream_users_ids - realm_admin_ids)
        else:
            # Guest users need a `create` notification for
            # public streams as well because they need the stream
            # to exist before they get the "subscribe" notification.
            notify_user_ids = list(stream_users_ids & altered_guests)

        if notify_user_ids:
            send_stream_creation_event(realm, stream, notify_user_ids, recent_traffic)
|
2022-04-14 23:51:16 +02:00
|
|
|
|
|
|
|
|
|
|
|
def send_peer_subscriber_events(
    op: str,
    realm: Realm,
    stream_dict: Dict[int, Stream],
    altered_user_dict: Dict[int, Set[int]],
    private_peer_dict: Dict[int, Set[int]],
) -> None:
    """Notify *other* users that someone (un)subscribed to streams.

    `op` is "peer_add" or "peer_remove".  `altered_user_dict` maps stream
    id -> the ids of users whose subscriptions changed;
    `private_peer_dict` maps private stream id -> ids of users allowed to
    see that stream's subscriber list.  All events are sent on commit.
    """
    # Send peer_add/peer_remove events to other users who are tracking the
    # subscribers lists of streams in their browser; everyone for
    # public streams and only existing subscribers for private streams.

    assert op in ["peer_add", "peer_remove"]

    private_stream_ids = [
        stream_id for stream_id in altered_user_dict if stream_dict[stream_id].invite_only
    ]

    for stream_id in private_stream_ids:
        altered_user_ids = altered_user_dict[stream_id]
        # Don't notify the altered users about themselves.
        peer_user_ids = private_peer_dict[stream_id] - altered_user_ids

        if peer_user_ids and altered_user_ids:
            event = dict(
                type="subscription",
                op=op,
                stream_ids=[stream_id],
                user_ids=sorted(altered_user_ids),
            )
            send_event_on_commit(realm, event, peer_user_ids)

    # Zephyr-realm public streams are excluded; their subscriber lists
    # are not tracked this way.
    public_stream_ids = [
        stream_id
        for stream_id in altered_user_dict
        if not stream_dict[stream_id].invite_only and not stream_dict[stream_id].is_in_zephyr_realm
    ]

    if public_stream_ids:
        # user id -> public stream ids where that user was the only one altered;
        # these get batched into a single event per user below.
        user_streams: Dict[int, Set[int]] = defaultdict(set)

        public_peer_ids = set(active_non_guest_user_ids(realm.id))

        for stream_id in public_stream_ids:
            altered_user_ids = altered_user_dict[stream_id]
            peer_user_ids = public_peer_ids - altered_user_ids

            if peer_user_ids and altered_user_ids:
                if len(altered_user_ids) == 1:
                    # If we only have one user, we will try to
                    # find other streams they have (un)subscribed to
                    # (where it's just them).  This optimization
                    # typically works when a single user is subscribed
                    # to multiple default public streams during
                    # new-user registration.
                    #
                    # This optimization depends on all public streams
                    # having the same peers for any single user, which
                    # isn't the case for private streams.
                    [altered_user_id] = altered_user_ids
                    user_streams[altered_user_id].add(stream_id)
                else:
                    event = dict(
                        type="subscription",
                        op=op,
                        stream_ids=[stream_id],
                        user_ids=sorted(altered_user_ids),
                    )
                    send_event_on_commit(realm, event, peer_user_ids)

        # One batched event per user for the single-user streams
        # collected above.
        for user_id, stream_ids in user_streams.items():
            peer_user_ids = public_peer_ids - {user_id}
            event = dict(
                type="subscription",
                op=op,
                stream_ids=sorted(stream_ids),
                user_ids=[user_id],
            )
            send_event_on_commit(realm, event, peer_user_ids)
|
2022-04-14 23:51:16 +02:00
|
|
|
|
|
|
|
|
|
|
|
# Pair of (subscriptions newly created, subscriptions reactivated);
# used as the return type of bulk_add_subscriptions below.
SubT = Tuple[List[SubInfo], List[SubInfo]]
|
|
|
|
|
|
|
|
|
|
|
|
def bulk_add_subscriptions(
    realm: Realm,
    streams: Collection[Stream],
    users: Iterable[UserProfile],
    # NOTE: color_map is only ever read (via .get), so the shared mutable
    # default is safe here; it maps stream name -> preferred color.
    color_map: Mapping[str, str] = {},
    from_user_creation: bool = False,
    *,
    acting_user: Optional[UserProfile],
) -> SubT:
    """Subscribe each of `users` to each of `streams`, creating new
    Subscription rows or reactivating soft-deleted ones as needed.

    Returns a tuple of (subscriptions newly added or reactivated,
    subscriptions that already existed and were active).

    When `from_user_creation` is True, the notifications that would go to
    the users being subscribed are skipped (new users have no clients yet);
    peer_add events to other users are still sent.
    """
    users = list(users)
    user_ids = [user.id for user in users]

    # Sanity check our callers: everything must live in the same realm.
    for stream in streams:
        assert stream.realm_id == realm.id

    for user in users:
        assert user.realm_id == realm.id

    recipient_ids = [stream.recipient_id for stream in streams]
    recipient_id_to_stream = {stream.recipient_id: stream for stream in streams}

    # recipient_color_map carries any caller-requested colors, keyed by
    # recipient_id rather than stream name.
    recipient_color_map = {}
    recipient_ids_set: Set[int] = set()
    for stream in streams:
        assert stream.recipient_id is not None
        recipient_ids_set.add(stream.recipient_id)
        color: Optional[str] = color_map.get(stream.name, None)
        if color is not None:
            recipient_color_map[stream.recipient_id] = color

    # Colors already used by each user, so pick_colors can avoid collisions.
    used_colors_for_user_ids: Dict[int, Set[str]] = get_used_colors_for_user_ids(user_ids)

    # Fetch any existing Subscription rows (active or not) for these
    # user/stream pairs in a single query.
    existing_subs = Subscription.objects.filter(
        user_profile_id__in=user_ids,
        recipient__type=Recipient.STREAM,
        recipient_id__in=recipient_ids,
    )

    subs_by_user: Dict[int, List[Subscription]] = defaultdict(list)
    for sub in existing_subs:
        subs_by_user[sub.user_profile_id].append(sub)

    already_subscribed: List[SubInfo] = []
    subs_to_activate: List[SubInfo] = []
    subs_to_add: List[SubInfo] = []
    for user_profile in users:
        my_subs = subs_by_user[user_profile.id]

        # Make a fresh set of all new recipient ids, and then we will
        # remove any for which our user already has a subscription
        # (and we'll re-activate any subscriptions as needed).
        new_recipient_ids: Set[int] = recipient_ids_set.copy()

        for sub in my_subs:
            if sub.recipient_id in new_recipient_ids:
                new_recipient_ids.remove(sub.recipient_id)
                stream = recipient_id_to_stream[sub.recipient_id]
                sub_info = SubInfo(user_profile, sub, stream)
                if sub.active:
                    already_subscribed.append(sub_info)
                else:
                    subs_to_activate.append(sub_info)

        used_colors = used_colors_for_user_ids.get(user_profile.id, set())
        user_color_map = pick_colors(used_colors, recipient_color_map, list(new_recipient_ids))

        # Build (unsaved) Subscription objects for the genuinely new
        # subscriptions; they are persisted in bulk below.
        for recipient_id in new_recipient_ids:
            stream = recipient_id_to_stream[recipient_id]
            color = user_color_map[recipient_id]

            sub = Subscription(
                user_profile=user_profile,
                is_user_active=user_profile.is_active,
                active=True,
                color=color,
                recipient_id=recipient_id,
            )
            sub_info = SubInfo(user_profile, sub, stream)
            subs_to_add.append(sub_info)

    # Persist the new and reactivated subscriptions (with audit logging).
    bulk_add_subs_to_db_with_logging(
        realm=realm,
        acting_user=acting_user,
        subs_to_add=subs_to_add,
        subs_to_activate=subs_to_activate,
    )

    # altered_user_dict: stream_id -> set of user ids whose subscription
    # state actually changed; altered_guests tracks which of those are
    # guest users (they have narrower stream visibility).
    altered_user_dict: Dict[int, Set[int]] = defaultdict(set)
    altered_guests: Set[int] = set()
    for sub_info in subs_to_add + subs_to_activate:
        altered_user_dict[sub_info.stream.id].add(sub_info.user.id)
        if sub_info.user.is_guest:
            altered_guests.add(sub_info.user.id)

    stream_dict = {stream.id: stream for stream in streams}

    new_streams = [stream_dict[stream_id] for stream_id in altered_user_dict]

    subscriber_peer_info = bulk_get_subscriber_peer_info(
        realm=realm,
        streams=new_streams,
    )

    # We now send several types of events to notify browsers.  The
    # first batches of notifications are sent only to the user(s)
    # being subscribed; we can skip these notifications when this is
    # being called from the new user creation flow.
    if not from_user_creation:
        send_stream_creation_events_for_previously_inaccessible_streams(
            realm=realm,
            stream_dict=stream_dict,
            altered_user_dict=altered_user_dict,
            altered_guests=altered_guests,
        )

        send_subscription_add_events(
            realm=realm,
            sub_info_list=subs_to_add + subs_to_activate,
            subscriber_dict=subscriber_peer_info.subscribed_ids,
        )

    # Tell other subscribers (peers) about the new subscriptions; this is
    # sent even during user creation, since peers do have clients.
    send_peer_subscriber_events(
        op="peer_add",
        realm=realm,
        altered_user_dict=altered_user_dict,
        stream_dict=stream_dict,
        private_peer_dict=subscriber_peer_info.private_peer_dict,
    )

    return (
        subs_to_add + subs_to_activate,
        already_subscribed,
    )
|
|
|
|
|
|
|
|
|
|
|
|
def send_peer_remove_events(
    realm: Realm,
    streams: List[Stream],
    altered_user_dict: Dict[int, Set[int]],
) -> None:
    """Send peer_remove events so remaining subscribers learn who left.

    altered_user_dict maps stream_id -> set of user ids that were
    unsubscribed from that stream.
    """
    # Only invite-only streams need the restricted peer computation.
    invite_only_streams = []
    for candidate in streams:
        if candidate.invite_only:
            invite_only_streams.append(candidate)

    peers_for_private_streams = bulk_get_private_peers(
        realm=realm,
        private_streams=invite_only_streams,
    )

    streams_by_id = {}
    for candidate in streams:
        streams_by_id[candidate.id] = candidate

    send_peer_subscriber_events(
        op="peer_remove",
        realm=realm,
        stream_dict=streams_by_id,
        altered_user_dict=altered_user_dict,
        private_peer_dict=peers_for_private_streams,
    )
|
|
|
|
|
|
|
|
|
|
|
|
def notify_subscriptions_removed(
    realm: Realm, user_profile: UserProfile, streams: Iterable[Stream]
) -> None:
    """Send this user a subscription/remove event for the given streams."""
    removed_subscriptions = []
    for stream in streams:
        removed_subscriptions.append(dict(name=stream.name, stream_id=stream.id))
    removal_event = dict(
        type="subscription",
        op="remove",
        subscriptions=removed_subscriptions,
    )
    send_event(realm, removal_event, [user_profile.id])
|
|
|
|
|
|
|
|
|
|
|
|
# Return type of bulk_remove_subscriptions: ((user, stream) pairs actually
# unsubscribed, (user, stream) pairs that were not subscribed to begin with).
SubAndRemovedT = Tuple[List[Tuple[UserProfile, Stream]], List[Tuple[UserProfile, Stream]]]
2022-05-11 08:42:08 +02:00
|
|
|
def send_subscription_remove_events(
    realm: Realm,
    users: List[UserProfile],
    streams: List[Stream],
    removed_subs: List[Tuple[UserProfile, Stream]],
) -> None:
    """Notify clients about removed subscriptions.

    For each affected user: send a subscription/remove event, and queue a
    deferred_work job to mark that user's messages in the removed streams
    as read.  Then send peer_remove events to remaining subscribers.
    """
    # stream_id -> user ids removed from it (consumed by peer_remove below);
    # user_id -> streams that user was removed from.
    altered_user_dict: Dict[int, Set[int]] = defaultdict(set)
    streams_by_user: Dict[int, List[Stream]] = defaultdict(list)
    for user, stream in removed_subs:
        streams_by_user[user.id].append(stream)
        altered_user_dict[stream.id].add(user.id)

    for user_profile in users:
        if len(streams_by_user[user_profile.id]) == 0:
            # This user lost no subscriptions; nothing to notify.
            continue
        notify_subscriptions_removed(realm, user_profile, streams_by_user[user_profile.id])

        # Marking messages as read can be slow, so hand it to the
        # deferred_work queue rather than doing it inline.
        event = {
            "type": "mark_stream_messages_as_read",
            "user_profile_id": user_profile.id,
            "stream_recipient_ids": [
                stream.recipient_id for stream in streams_by_user[user_profile.id]
            ],
        }
        queue_json_publish("deferred_work", event)

    send_peer_remove_events(
        realm=realm,
        streams=streams,
        altered_user_dict=altered_user_dict,
    )
|
|
|
|
|
|
|
|
|
2022-04-14 23:51:16 +02:00
|
|
|
def bulk_remove_subscriptions(
    realm: Realm,
    users: Iterable[UserProfile],
    streams: Iterable[Stream],
    *,
    acting_user: Optional[UserProfile],
) -> SubAndRemovedT:
    """Unsubscribe each of `users` from each of `streams`.

    Deactivates the matching Subscription rows (with RealmAuditLog
    entries), sends the relevant client events, and deactivates any
    private streams that became vacant.

    Returns ((user, stream) pairs actually unsubscribed,
             (user, stream) pairs that were not subscribed at all).
    """
    users = list(users)
    streams = list(streams)

    # Sanity check our callers
    for stream in streams:
        assert stream.realm_id == realm.id

    for user in users:
        assert user.realm_id == realm.id

    stream_dict = {stream.id: stream for stream in streams}

    # user_id -> list of SubInfo for that user's active subscriptions
    # among `streams`.
    existing_subs_by_user = get_bulk_stream_subscriber_info(users, streams)

    def get_non_subscribed_subs() -> List[Tuple[UserProfile, Stream]]:
        # (user, stream) pairs requested for removal where the user had
        # no active subscription; reported back to the caller unchanged.
        stream_ids = {stream.id for stream in streams}

        not_subscribed: List[Tuple[UserProfile, Stream]] = []

        for user_profile in users:
            user_sub_stream_info = existing_subs_by_user[user_profile.id]

            subscribed_stream_ids = {sub_info.stream.id for sub_info in user_sub_stream_info}
            not_subscribed_stream_ids = stream_ids - subscribed_stream_ids

            for stream_id in not_subscribed_stream_ids:
                stream = stream_dict[stream_id]
                not_subscribed.append((user_profile, stream))

        return not_subscribed

    not_subscribed = get_non_subscribed_subs()

    subs_to_deactivate: List[SubInfo] = []
    sub_ids_to_deactivate: List[int] = []

    # This loop just flattens out our data into big lists for
    # bulk operations.
    for sub_infos in existing_subs_by_user.values():
        for sub_info in sub_infos:
            subs_to_deactivate.append(sub_info)
            sub_ids_to_deactivate.append(sub_info.sub.id)

    streams_to_unsubscribe = [sub_info.stream for sub_info in subs_to_deactivate]
    # We do all the database changes in a transaction to ensure
    # RealmAuditLog entries are atomically created when making changes.
    with transaction.atomic():
        Subscription.objects.filter(
            id__in=sub_ids_to_deactivate,
        ).update(active=False)
        # Snapshot occupied streams after the update, inside the
        # transaction, so the vacancy computation below is consistent.
        occupied_streams_after = list(get_occupied_streams(realm))

        # Log subscription activities in RealmAuditLog
        event_time = timezone_now()
        event_last_message_id = get_last_message_id()
        all_subscription_logs = [
            RealmAuditLog(
                realm=sub_info.user.realm,
                acting_user=acting_user,
                modified_user=sub_info.user,
                modified_stream=sub_info.stream,
                event_last_message_id=event_last_message_id,
                event_type=RealmAuditLog.SUBSCRIPTION_DEACTIVATED,
                event_time=event_time,
            )
            for sub_info in subs_to_deactivate
        ]

        # Now since we have all log objects generated we can do a bulk insert
        RealmAuditLog.objects.bulk_create(all_subscription_logs)

    removed_sub_tuples = [(sub_info.user, sub_info.stream) for sub_info in subs_to_deactivate]
    send_subscription_remove_events(realm, users, streams, removed_sub_tuples)

    # Streams we just unsubscribed from that now have no subscribers at all.
    new_vacant_streams = set(streams_to_unsubscribe) - set(occupied_streams_after)
    new_vacant_private_streams = [stream for stream in new_vacant_streams if stream.invite_only]

    if new_vacant_private_streams:
        # Deactivate any newly-vacant private streams
        for stream in new_vacant_private_streams:
            do_deactivate_stream(stream, acting_user=acting_user)

    return (
        removed_sub_tuples,
        not_subscribed,
    )
|
|
|
|
|
|
|
|
|
|
|
|
def do_change_subscription_property(
    user_profile: UserProfile,
    sub: Subscription,
    stream: Stream,
    property_name: str,
    value: Any,
    *,
    acting_user: Optional[UserProfile],
) -> None:
    """Update one property on a user's Subscription row.

    Saves the change, records it in RealmAuditLog, and notifies the
    user's own clients via subscription/update events.  The legacy
    "in_home_view" API name is translated to the "is_muted" database
    field (with inverted value).
    """
    database_property_name = property_name
    database_value = value

    # For this property, is_muted is used in the database, but
    # in_home_view is still in the API, since we haven't fully
    # migrated to the new name yet.
    if property_name == "in_home_view":
        database_property_name = "is_muted"
        database_value = not value

    old_value = getattr(sub, database_property_name)
    setattr(sub, database_property_name, database_value)
    sub.save(update_fields=[database_property_name])
    event_time = timezone_now()
    RealmAuditLog.objects.create(
        realm=user_profile.realm,
        event_type=RealmAuditLog.SUBSCRIPTION_PROPERTY_CHANGED,
        event_time=event_time,
        modified_user=user_profile,
        acting_user=acting_user,
        modified_stream=stream,
        extra_data=orjson.dumps(
            {
                RealmAuditLog.OLD_VALUE: old_value,
                RealmAuditLog.NEW_VALUE: database_value,
                "property": database_property_name,
            }
        ).decode(),
    )

    # This first in_home_view event is deprecated and will be removed
    # once clients are migrated to handle the subscription update event
    # with is_muted as the property name.
    if database_property_name == "is_muted":
        # in_home_view is the logical inverse of is_muted.
        event_value = not database_value
        in_home_view_event = dict(
            type="subscription",
            op="update",
            property="in_home_view",
            value=event_value,
            stream_id=stream.id,
        )

        send_event(user_profile.realm, in_home_view_event, [user_profile.id])

    # The canonical event, using the database property name and value.
    event = dict(
        type="subscription",
        op="update",
        property=database_property_name,
        value=database_value,
        stream_id=stream.id,
    )
    send_event(user_profile.realm, event, [user_profile.id])
|
|
|
|
|
|
|
|
|
|
|
|
def send_change_stream_permission_notification(
    stream: Stream,
    *,
    old_policy_name: str,
    new_policy_name: str,
    acting_user: UserProfile,
) -> None:
    """Post a Notification Bot message to the stream announcing its
    permission-policy change, localized to the realm's default language."""
    notification_bot = get_system_bot(settings.NOTIFICATION_BOT, acting_user.realm_id)
    acting_user_mention = silent_mention_syntax_for_user(acting_user)

    with override_language(stream.realm.default_language):
        announcement = _(
            "{user} changed the [access permissions](/help/stream-permissions) "
            "for this stream from **{old_policy}** to **{new_policy}**."
        ).format(
            user=acting_user_mention,
            old_policy=old_policy_name,
            new_policy=new_policy_name,
        )
        internal_send_stream_message(
            notification_bot,
            stream,
            str(Realm.STREAM_EVENTS_NOTIFICATION_TOPIC),
            announcement,
        )
|
|
|
|
|
|
|
|
|
|
|
|
def do_change_stream_permission(
    stream: Stream,
    *,
    invite_only: bool,
    history_public_to_subscribers: bool,
    is_web_public: bool,
    acting_user: UserProfile,
) -> None:
    """Change a stream's privacy settings (invite_only,
    history_public_to_subscribers, is_web_public).

    Persists the change with RealmAuditLog entries (atomically),
    invalidates affected attachment-permission caches, sends stream
    creation/peer_add events to users who newly gained access, sends a
    stream/update event to everyone else with access, and posts a
    Notification Bot message announcing the policy change.
    """
    old_invite_only_value = stream.invite_only
    old_history_public_to_subscribers_value = stream.history_public_to_subscribers
    old_is_web_public_value = stream.is_web_public

    stream.is_web_public = is_web_public
    stream.invite_only = invite_only
    stream.history_public_to_subscribers = history_public_to_subscribers

    realm = stream.realm

    with transaction.atomic():
        stream.save(update_fields=["invite_only", "history_public_to_subscribers", "is_web_public"])

        event_time = timezone_now()
        if old_invite_only_value != stream.invite_only:
            # Reset the Attachment.is_realm_public cache for all
            # messages in the stream whose permissions were changed.
            assert stream.recipient_id is not None
            Attachment.objects.filter(messages__recipient_id=stream.recipient_id).update(
                is_realm_public=None
            )
            # We need to do the same for ArchivedAttachment to avoid
            # bugs if deleted attachments are later restored.
            ArchivedAttachment.objects.filter(messages__recipient_id=stream.recipient_id).update(
                is_realm_public=None
            )

            RealmAuditLog.objects.create(
                realm=realm,
                acting_user=acting_user,
                modified_stream=stream,
                event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
                event_time=event_time,
                extra_data=orjson.dumps(
                    {
                        RealmAuditLog.OLD_VALUE: old_invite_only_value,
                        RealmAuditLog.NEW_VALUE: stream.invite_only,
                        "property": "invite_only",
                    }
                ).decode(),
            )

        if old_history_public_to_subscribers_value != stream.history_public_to_subscribers:
            RealmAuditLog.objects.create(
                realm=realm,
                acting_user=acting_user,
                modified_stream=stream,
                event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
                event_time=event_time,
                extra_data=orjson.dumps(
                    {
                        RealmAuditLog.OLD_VALUE: old_history_public_to_subscribers_value,
                        RealmAuditLog.NEW_VALUE: stream.history_public_to_subscribers,
                        "property": "history_public_to_subscribers",
                    }
                ).decode(),
            )

        if old_is_web_public_value != stream.is_web_public:
            # Reset the Attachment.is_realm_public cache for all
            # messages in the stream whose permissions were changed.
            assert stream.recipient_id is not None
            Attachment.objects.filter(messages__recipient_id=stream.recipient_id).update(
                is_web_public=None
            )
            # We need to do the same for ArchivedAttachment to avoid
            # bugs if deleted attachments are later restored.
            ArchivedAttachment.objects.filter(messages__recipient_id=stream.recipient_id).update(
                is_web_public=None
            )

            RealmAuditLog.objects.create(
                realm=realm,
                acting_user=acting_user,
                modified_stream=stream,
                event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
                event_time=event_time,
                extra_data=orjson.dumps(
                    {
                        RealmAuditLog.OLD_VALUE: old_is_web_public_value,
                        RealmAuditLog.NEW_VALUE: stream.is_web_public,
                        "property": "is_web_public",
                    }
                ).decode(),
            )

    notify_stream_creation_ids = set()
    if old_invite_only_value and not stream.invite_only:
        # We need to send stream creation event to users who can access the
        # stream now but were not able to do so previously. So, we can exclude
        # subscribers, users who were previously subscribed to the stream and
        # realm admins from the non-guest user list.
        assert stream.recipient_id is not None
        previously_subscribed_user_ids = Subscription.objects.filter(
            recipient_id=stream.recipient_id, active=False, is_user_active=True
        ).values_list("user_profile_id", flat=True)
        stream_subscriber_user_ids = get_active_subscriptions_for_stream_id(
            stream.id, include_deactivated_users=False
        ).values_list("user_profile_id", flat=True)

        old_can_access_stream_user_ids = (
            set(stream_subscriber_user_ids)
            | set(previously_subscribed_user_ids)
            | {user.id for user in stream.realm.get_admin_users_and_bots()}
        )
        non_guest_user_ids = set(active_non_guest_user_ids(stream.realm_id))
        notify_stream_creation_ids = non_guest_user_ids - old_can_access_stream_user_ids

        recent_traffic = None
        if not realm.is_zephyr_mirror_realm:
            # We do not need stream traffic data for streams in zephyr mirroring realms.
            recent_traffic = get_streams_traffic({stream.id})
        send_stream_creation_event(realm, stream, list(notify_stream_creation_ids), recent_traffic)

        # Add subscribers info to the stream object. We need to send peer_add
        # events to users who were previously subscribed to the streams as
        # they did not have subscribers data.
        old_subscribers_access_user_ids = set(stream_subscriber_user_ids) | {
            user.id for user in stream.realm.get_admin_users_and_bots()
        }
        peer_notify_user_ids = non_guest_user_ids - old_subscribers_access_user_ids
        peer_add_event = dict(
            type="subscription",
            op="peer_add",
            stream_ids=[stream.id],
            user_ids=sorted(stream_subscriber_user_ids),
        )
        send_event(stream.realm, peer_add_event, peer_notify_user_ids)

    event = dict(
        op="update",
        type="stream",
        property="invite_only",
        value=stream.invite_only,
        history_public_to_subscribers=stream.history_public_to_subscribers,
        is_web_public=stream.is_web_public,
        stream_id=stream.id,
        name=stream.name,
    )
    # we do not need to send update events to the users who received creation event
    # since they already have the updated stream info.
    notify_stream_update_ids = can_access_stream_user_ids(stream) - notify_stream_creation_ids
    send_event(stream.realm, event, notify_stream_update_ids)

    old_policy_name = get_stream_permission_policy_name(
        invite_only=old_invite_only_value,
        history_public_to_subscribers=old_history_public_to_subscribers_value,
        is_web_public=old_is_web_public_value,
    )
    new_policy_name = get_stream_permission_policy_name(
        invite_only=stream.invite_only,
        history_public_to_subscribers=stream.history_public_to_subscribers,
        is_web_public=stream.is_web_public,
    )
    send_change_stream_permission_notification(
        stream,
        old_policy_name=old_policy_name,
        new_policy_name=new_policy_name,
        acting_user=acting_user,
    )
|
|
|
|
|
|
|
|
|
|
|
|
def send_change_stream_post_policy_notification(
    stream: Stream, *, old_post_policy: int, new_post_policy: int, acting_user: UserProfile
) -> None:
    """Post a Notification Bot message to the stream announcing its
    posting-policy change, localized to the realm's default language."""
    notification_bot = get_system_bot(settings.NOTIFICATION_BOT, acting_user.realm_id)
    acting_user_mention = silent_mention_syntax_for_user(acting_user)

    with override_language(stream.realm.default_language):
        announcement = _(
            "{user} changed the [posting permissions](/help/stream-sending-policy) "
            "for this stream:\n\n"
            "* **Old permissions**: {old_policy}.\n"
            "* **New permissions**: {new_policy}.\n"
        ).format(
            user=acting_user_mention,
            old_policy=Stream.POST_POLICIES[old_post_policy],
            new_policy=Stream.POST_POLICIES[new_post_policy],
        )
        internal_send_stream_message(
            notification_bot,
            stream,
            str(Realm.STREAM_EVENTS_NOTIFICATION_TOPIC),
            announcement,
        )
|
|
|
|
|
|
|
|
|
|
|
|
def do_change_stream_post_policy(
    stream: Stream, stream_post_policy: int, *, acting_user: UserProfile
) -> None:
    """Change who may post to the stream.

    Saves the new stream_post_policy (with a RealmAuditLog entry, in a
    transaction), sends stream/update events — including a deprecated
    is_announcement_only duplicate for legacy clients — and posts a
    Notification Bot message announcing the change.
    """
    old_post_policy = stream.stream_post_policy
    with transaction.atomic():
        stream.stream_post_policy = stream_post_policy
        stream.save(update_fields=["stream_post_policy"])
        RealmAuditLog.objects.create(
            realm=stream.realm,
            acting_user=acting_user,
            modified_stream=stream,
            event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
            event_time=timezone_now(),
            extra_data=orjson.dumps(
                {
                    RealmAuditLog.OLD_VALUE: old_post_policy,
                    RealmAuditLog.NEW_VALUE: stream_post_policy,
                    "property": "stream_post_policy",
                }
            ).decode(),
        )

    event = dict(
        op="update",
        type="stream",
        property="stream_post_policy",
        value=stream_post_policy,
        stream_id=stream.id,
        name=stream.name,
    )
    send_event(stream.realm, event, can_access_stream_user_ids(stream))

    # Backwards-compatibility code: We removed the
    # is_announcement_only property in early 2020, but we send a
    # duplicate event for legacy mobile clients that might want the
    # data.
    event = dict(
        op="update",
        type="stream",
        property="is_announcement_only",
        value=stream.stream_post_policy == Stream.STREAM_POST_POLICY_ADMINS,
        stream_id=stream.id,
        name=stream.name,
    )
    send_event(stream.realm, event, can_access_stream_user_ids(stream))

    send_change_stream_post_policy_notification(
        stream,
        old_post_policy=old_post_policy,
        new_post_policy=stream_post_policy,
        acting_user=acting_user,
    )
|
|
|
|
|
|
|
|
|
|
|
|
def do_rename_stream(stream: Stream, new_name: str, user_profile: UserProfile) -> Dict[str, str]:
    """Rename a stream.

    Saves the new name with a RealmAuditLog entry, repairs the relevant
    caches (display_recipient and message to_dict entries), sends
    stream/update events for both the name and the derived email
    address, and posts a Notification Bot message announcing the rename.

    Returns a dict with the stream's new email forwarding address.
    """
    old_name = stream.name
    stream.name = new_name
    stream.save(update_fields=["name"])

    RealmAuditLog.objects.create(
        realm=stream.realm,
        acting_user=user_profile,
        modified_stream=stream,
        event_type=RealmAuditLog.STREAM_NAME_CHANGED,
        event_time=timezone_now(),
        extra_data=orjson.dumps(
            {
                RealmAuditLog.OLD_VALUE: old_name,
                RealmAuditLog.NEW_VALUE: new_name,
            }
        ).decode(),
    )

    assert stream.recipient_id is not None
    recipient_id: int = stream.recipient_id
    messages = Message.objects.filter(recipient_id=recipient_id).only("id")

    # The display_recipient cache stores the stream name, so overwrite it
    # with the new name directly.
    cache_set(display_recipient_cache_key(recipient_id), stream.name)

    # Delete cache entries for everything else, which is cheaper and
    # clearer than trying to set them. display_recipient is the out of
    # date field in all cases.
    cache_delete_many(to_dict_cache_key_id(message.id) for message in messages)
    new_email = encode_email_address(stream, show_sender=True)

    # We will tell our users to essentially
    # update stream.name = new_name where name = old_name
    # and update stream.email = new_email where name = old_name.
    # We could optimize this by trying to send one message, but the
    # client code really wants one property update at a time, and
    # updating stream names is a pretty infrequent operation.
    # More importantly, we want to key these updates by id, not name,
    # since id is the immutable primary key, and obviously name is not.
    data_updates = [
        ["email_address", new_email],
        ["name", new_name],
    ]
    for property, value in data_updates:
        event = dict(
            op="update",
            type="stream",
            property=property,
            value=value,
            stream_id=stream.id,
            name=old_name,
        )
        send_event(stream.realm, event, can_access_stream_user_ids(stream))
    sender = get_system_bot(settings.NOTIFICATION_BOT, stream.realm_id)
    with override_language(stream.realm.default_language):
        internal_send_stream_message(
            sender,
            stream,
            str(Realm.STREAM_EVENTS_NOTIFICATION_TOPIC),
            _("{user_name} renamed stream {old_stream_name} to {new_stream_name}.").format(
                user_name=silent_mention_syntax_for_user(user_profile),
                old_stream_name=f"**{old_name}**",
                new_stream_name=f"**{new_name}**",
            ),
        )
    # Even though the token doesn't change, the web client needs to update the
    # email forwarding address to display the correctly-escaped new name.
    return {"email_address": new_email}
|
|
|
|
|
|
|
|
|
|
|
|
def send_change_stream_description_notification(
    stream: Stream, *, old_description: str, new_description: str, acting_user: UserProfile
) -> None:
    """Post a Notification Bot message to the stream quoting the old and
    new descriptions, localized to the realm's default language."""
    notification_bot = get_system_bot(settings.NOTIFICATION_BOT, acting_user.realm_id)
    acting_user_mention = silent_mention_syntax_for_user(acting_user)

    with override_language(stream.realm.default_language):
        # Show an explicit placeholder when either side is empty.
        if new_description == "":
            new_description = "*" + _("No description.") + "*"
        if old_description == "":
            old_description = "*" + _("No description.") + "*"

        announcement = "".join(
            [
                _("{user} changed the description for this stream.").format(
                    user=acting_user_mention
                ),
                "\n\n* **",
                _("Old description"),
                ":**",
                f"\n```` quote\n{old_description}\n````\n",
                "* **",
                _("New description"),
                ":**",
                f"\n```` quote\n{new_description}\n````",
            ]
        )

        internal_send_stream_message(
            notification_bot,
            stream,
            str(Realm.STREAM_EVENTS_NOTIFICATION_TOPIC),
            announcement,
        )
|
|
|
|
|
|
|
|
|
|
|
|
def do_change_stream_description(
    stream: Stream, new_description: str, *, acting_user: UserProfile
) -> None:
    """Change a stream's description.

    Saves both the raw and rendered description (with a RealmAuditLog
    entry, in a transaction), sends a stream/update event to everyone
    with access, and posts a Notification Bot message announcing the
    change.
    """
    old_description = stream.description

    with transaction.atomic():
        stream.description = new_description
        # Keep the pre-rendered Markdown in sync with the raw description.
        stream.rendered_description = render_stream_description(new_description, stream.realm)
        stream.save(update_fields=["description", "rendered_description"])
        RealmAuditLog.objects.create(
            realm=stream.realm,
            acting_user=acting_user,
            modified_stream=stream,
            event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
            event_time=timezone_now(),
            extra_data=orjson.dumps(
                {
                    RealmAuditLog.OLD_VALUE: old_description,
                    RealmAuditLog.NEW_VALUE: new_description,
                    "property": "description",
                }
            ).decode(),
        )

    event = dict(
        type="stream",
        op="update",
        property="description",
        name=stream.name,
        stream_id=stream.id,
        value=new_description,
        rendered_description=stream.rendered_description,
    )
    send_event(stream.realm, event, can_access_stream_user_ids(stream))

    send_change_stream_description_notification(
        stream,
        old_description=old_description,
        new_description=new_description,
        acting_user=acting_user,
    )
|
|
|
|
|
|
|
|
|
|
|
|
def send_change_stream_message_retention_days_notification(
    user_profile: UserProfile, stream: Stream, old_value: Optional[int], new_value: Optional[int]
) -> None:
    """Post a notification message to the stream describing a change to
    its message retention policy.

    Args:
        user_profile: The user who changed the setting; mentioned in the
            notification text.
        stream: The stream whose retention policy changed.
        old_value: Previous per-stream retention in days; None means the
            stream was using the realm-level default.
        new_value: New per-stream retention in days; None means the realm
            default now applies.
    """
    sender = get_system_bot(settings.NOTIFICATION_BOT, user_profile.realm_id)
    user_mention = silent_mention_syntax_for_user(user_profile)

    # If switching from or to the organization's default retention policy,
    # we want to take the realm's default into account.
    if old_value is None:
        old_value = stream.realm.message_retention_days
    if new_value is None:
        new_value = stream.realm.message_retention_days

    with override_language(stream.realm.default_language):
        # Bug fix: the "{number} days" and summary-line strings were plain
        # f-strings, so they bypassed translation even though this code runs
        # under override_language; wrap them in _() like the sibling strings.
        if old_value == Stream.MESSAGE_RETENTION_SPECIAL_VALUES_MAP["unlimited"]:
            old_retention_period = _("Forever")
            new_retention_period = _("{number} days").format(number=new_value)
            summary_line = _(
                "Messages in this stream will now be automatically deleted {number} days after they are sent."
            ).format(number=new_value)
        elif new_value == Stream.MESSAGE_RETENTION_SPECIAL_VALUES_MAP["unlimited"]:
            old_retention_period = _("{number} days").format(number=old_value)
            new_retention_period = _("Forever")
            summary_line = _("Messages in this stream will now be retained forever.")
        else:
            old_retention_period = _("{number} days").format(number=old_value)
            new_retention_period = _("{number} days").format(number=new_value)
            summary_line = _(
                "Messages in this stream will now be automatically deleted {number} days after they are sent."
            ).format(number=new_value)
        notification_string = _(
            "{user} has changed the [message retention period](/help/message-retention-policy) for this stream:\n"
            "* **Old retention period**: {old_retention_period}\n"
            "* **New retention period**: {new_retention_period}\n\n"
            "{summary_line}"
        )
        notification_string = notification_string.format(
            user=user_mention,
            old_retention_period=old_retention_period,
            new_retention_period=new_retention_period,
            summary_line=summary_line,
        )
    internal_send_stream_message(
        sender, stream, str(Realm.STREAM_EVENTS_NOTIFICATION_TOPIC), notification_string
    )
|
|
|
|
|
|
|
|
|
|
|
|
def do_change_stream_message_retention_days(
    stream: Stream, acting_user: UserProfile, message_retention_days: Optional[int] = None
) -> None:
    """Change a stream's message_retention_days setting.

    The model update and its RealmAuditLog entry are committed atomically;
    afterwards a stream/update event is sent to all users who can access
    the stream, and a notification message is posted to the stream.
    A value of None means the realm-level default retention applies.
    """
    old_value = stream.message_retention_days

    with transaction.atomic():
        stream.message_retention_days = message_retention_days
        stream.save(update_fields=["message_retention_days"])
        RealmAuditLog.objects.create(
            realm=stream.realm,
            event_type=RealmAuditLog.STREAM_MESSAGE_RETENTION_DAYS_CHANGED,
            event_time=timezone_now(),
            acting_user=acting_user,
            modified_stream=stream,
            extra_data=orjson.dumps(
                {
                    RealmAuditLog.OLD_VALUE: old_value,
                    RealmAuditLog.NEW_VALUE: message_retention_days,
                }
            ).decode(),
        )

    event = {
        "op": "update",
        "type": "stream",
        "property": "message_retention_days",
        "value": message_retention_days,
        "stream_id": stream.id,
        "name": stream.name,
    }
    send_event(stream.realm, event, can_access_stream_user_ids(stream))

    send_change_stream_message_retention_days_notification(
        user_profile=acting_user,
        stream=stream,
        old_value=old_value,
        new_value=message_retention_days,
    )
|
2022-06-27 18:39:33 +02:00
|
|
|
|
|
|
|
|
2023-02-17 12:46:14 +01:00
|
|
|
def do_change_stream_group_based_setting(
    stream: Stream,
    setting_name: str,
    user_group: UserGroup,
    *,
    acting_user: Optional[UserProfile] = None,
) -> None:
    """Assign `user_group` to the group-based stream setting named by
    `setting_name`, record the change in the audit log, and notify
    clients with a stream/update event after the transaction commits.
    """
    old_user_group = getattr(stream, setting_name)
    # The setting may previously have been unset.
    old_user_group_id = old_user_group.id if old_user_group is not None else None

    setattr(stream, setting_name, user_group)
    stream.save()

    RealmAuditLog.objects.create(
        realm=stream.realm,
        event_type=RealmAuditLog.STREAM_GROUP_BASED_SETTING_CHANGED,
        event_time=timezone_now(),
        acting_user=acting_user,
        modified_stream=stream,
        extra_data=orjson.dumps(
            {
                RealmAuditLog.OLD_VALUE: old_user_group_id,
                RealmAuditLog.NEW_VALUE: user_group.id,
                "property": setting_name,
            }
        ).decode(),
    )

    event = {
        "op": "update",
        "type": "stream",
        "property": setting_name,
        "value": user_group.id,
        "stream_id": stream.id,
        "name": stream.name,
    }
    send_event_on_commit(stream.realm, event, can_access_stream_user_ids(stream))
|