from typing import Collection, Dict, List, Optional, Set, Tuple, TypedDict, Union

from django.db import transaction
from django.db.models import Exists, OuterRef, Q, QuerySet
from django.utils.timezone import now as timezone_now
from django.utils.translation import gettext as _

from zerver.lib.default_streams import get_default_stream_ids_for_realm
from zerver.lib.exceptions import (
    JsonableError,
    OrganizationAdministratorRequiredError,
    OrganizationOwnerRequiredError,
)
from zerver.lib.markdown import markdown_convert
from zerver.lib.stream_subscription import (
    get_active_subscriptions_for_stream_id,
    get_subscribed_stream_ids_for_user,
)
from zerver.lib.stream_traffic import get_average_weekly_stream_traffic, get_streams_traffic
from zerver.lib.string_validation import check_stream_name
from zerver.lib.timestamp import datetime_to_timestamp
from zerver.lib.types import APIStreamDict
from zerver.lib.user_groups import is_user_in_group
from zerver.models import (
    DefaultStreamGroup,
    Realm,
    RealmAuditLog,
    Recipient,
    Stream,
    Subscription,
    UserGroup,
    UserProfile,
)
from zerver.models.groups import SystemGroups
from zerver.models.streams import (
    bulk_get_streams,
    get_realm_stream,
    get_stream,
    get_stream_by_id_in_realm,
)
from zerver.models.users import active_non_guest_user_ids, active_user_ids, is_cross_realm_bot_email
from zerver.tornado.django_api import send_event


class StreamDict(TypedDict, total=False):
    """
    This type ultimately gets used in two places:

        - we use it to create a stream
        - we use it to specify a stream

    It's possible we want a smaller type to use
    for removing streams, but it would complicate
    how we write the types for list_to_stream.

    Note that these fields are just a subset of
    the fields in the Stream model.
    """

    name: str
    description: str
    invite_only: bool
    is_web_public: bool
    stream_post_policy: int
    history_public_to_subscribers: Optional[bool]
    message_retention_days: Optional[int]
    can_remove_subscribers_group: Optional[UserGroup]


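# Illustrative example (not part of the original module): because the TypedDict
# is declared with total=False, only the fields a caller cares about need to be
# supplied. A create-stream request might pass something like
#
#     stream_dict: StreamDict = {
#         "name": "design",
#         "description": "Discussion of design questions",
#         "invite_only": False,
#     }

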
def get_stream_permission_policy_name(
    *,
    invite_only: Optional[bool] = None,
    history_public_to_subscribers: Optional[bool] = None,
    is_web_public: Optional[bool] = None,
) -> str:
    policy_name = None
    for permission_dict in Stream.PERMISSION_POLICIES.values():
        if (
            permission_dict["invite_only"] == invite_only
            and permission_dict["history_public_to_subscribers"] == history_public_to_subscribers
            and permission_dict["is_web_public"] == is_web_public
        ):
            policy_name = permission_dict["policy_name"]
            break

    assert policy_name is not None
    return policy_name


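# Illustrative example (not part of the original module): for a stream created
# with the default public settings, this lookup is expected to return the
# display name of the matching entry in Stream.PERMISSION_POLICIES:
#
#     policy = get_stream_permission_policy_name(
#         invite_only=False,
#         history_public_to_subscribers=True,
#         is_web_public=False,
#     )

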
def get_default_value_for_history_public_to_subscribers(
    realm: Realm,
    invite_only: bool,
    history_public_to_subscribers: Optional[bool],
) -> bool:
    if invite_only:
        if history_public_to_subscribers is None:
            # A private stream's history is non-public by default
            history_public_to_subscribers = False
    else:
        # If we later decide to support public streams without
        # history, we can remove this code path.
        history_public_to_subscribers = True

    if realm.is_zephyr_mirror_realm:
        # In the Zephyr mirroring model, history is unconditionally
        # not public to subscribers, even for public streams.
        history_public_to_subscribers = False

    return history_public_to_subscribers


def render_stream_description(text: str, realm: Realm) -> str:
    return markdown_convert(text, message_realm=realm, no_previews=True).rendered_content


def send_stream_creation_event(
    realm: Realm,
    stream: Stream,
    user_ids: List[int],
    recent_traffic: Optional[Dict[int, int]] = None,
) -> None:
    event = dict(type="stream", op="create", streams=[stream_to_dict(stream, recent_traffic)])
    send_event(realm, event, user_ids)


def create_stream_if_needed(
    realm: Realm,
    stream_name: str,
    *,
    invite_only: bool = False,
    is_web_public: bool = False,
    stream_post_policy: int = Stream.STREAM_POST_POLICY_EVERYONE,
    history_public_to_subscribers: Optional[bool] = None,
    stream_description: str = "",
    message_retention_days: Optional[int] = None,
    can_remove_subscribers_group: Optional[UserGroup] = None,
    acting_user: Optional[UserProfile] = None,
) -> Tuple[Stream, bool]:
    history_public_to_subscribers = get_default_value_for_history_public_to_subscribers(
        realm, invite_only, history_public_to_subscribers
    )

    if can_remove_subscribers_group is None:
        can_remove_subscribers_group = UserGroup.objects.get(
            name=SystemGroups.ADMINISTRATORS, is_system_group=True, realm=realm
        )

    assert can_remove_subscribers_group is not None
    with transaction.atomic():
        (stream, created) = Stream.objects.get_or_create(
            realm=realm,
            name__iexact=stream_name,
            defaults=dict(
                name=stream_name,
                description=stream_description,
                invite_only=invite_only,
                is_web_public=is_web_public,
                stream_post_policy=stream_post_policy,
                history_public_to_subscribers=history_public_to_subscribers,
                is_in_zephyr_realm=realm.is_zephyr_mirror_realm,
                message_retention_days=message_retention_days,
                can_remove_subscribers_group=can_remove_subscribers_group,
            ),
        )

        if created:
            recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)

            stream.recipient = recipient
            stream.rendered_description = render_stream_description(stream_description, realm)
            stream.save(update_fields=["recipient", "rendered_description"])

            event_time = timezone_now()
            RealmAuditLog.objects.create(
                realm=realm,
                acting_user=acting_user,
                modified_stream=stream,
                event_type=RealmAuditLog.STREAM_CREATED,
                event_time=event_time,
            )
    if created:
        if stream.is_public():
            if stream.is_web_public:
                notify_user_ids = active_user_ids(stream.realm_id)
            else:
                notify_user_ids = active_non_guest_user_ids(stream.realm_id)
            send_stream_creation_event(realm, stream, notify_user_ids)
        else:
            realm_admin_ids = [user.id for user in stream.realm.get_admin_users_and_bots()]
            send_stream_creation_event(realm, stream, realm_admin_ids)

    return stream, created


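# Illustrative usage (not part of the original module): request handlers that
# need a stream to exist typically call this as follows, where `realm` and
# `user_profile` are assumed to come from the request context:
#
#     stream, created = create_stream_if_needed(
#         realm,
#         "design",
#         invite_only=True,
#         stream_description="Discussion of design questions",
#         acting_user=user_profile,
#     )
#
# `created` is False when a stream with that name already existed (the lookup
# is case-insensitive via name__iexact), in which case the existing Stream row
# is returned unchanged.

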
def create_streams_if_needed(
    realm: Realm, stream_dicts: List[StreamDict], acting_user: Optional[UserProfile] = None
) -> Tuple[List[Stream], List[Stream]]:
    """Note that stream_dict["name"] is assumed to already be stripped of
    whitespace"""
    added_streams: List[Stream] = []
    existing_streams: List[Stream] = []
    for stream_dict in stream_dicts:
        invite_only = stream_dict.get("invite_only", False)
        stream, created = create_stream_if_needed(
            realm,
            stream_dict["name"],
            invite_only=invite_only,
            is_web_public=stream_dict.get("is_web_public", False),
            stream_post_policy=stream_dict.get(
                "stream_post_policy", Stream.STREAM_POST_POLICY_EVERYONE
            ),
            history_public_to_subscribers=stream_dict.get("history_public_to_subscribers"),
            stream_description=stream_dict.get("description", ""),
            message_retention_days=stream_dict.get("message_retention_days", None),
            can_remove_subscribers_group=stream_dict.get("can_remove_subscribers_group", None),
            acting_user=acting_user,
        )

        if created:
            added_streams.append(stream)
        else:
            existing_streams.append(stream)

    return added_streams, existing_streams


def subscribed_to_stream(user_profile: UserProfile, stream_id: int) -> bool:
    return Subscription.objects.filter(
        user_profile=user_profile,
        active=True,
        recipient__type=Recipient.STREAM,
        recipient__type_id=stream_id,
    ).exists()


def check_stream_access_based_on_stream_post_policy(sender: UserProfile, stream: Stream) -> None:
    if sender.is_realm_admin or is_cross_realm_bot_email(sender.delivery_email):
        pass
    elif stream.stream_post_policy == Stream.STREAM_POST_POLICY_ADMINS:
        raise JsonableError(_("Only organization administrators can send to this stream."))
    elif (
        stream.stream_post_policy == Stream.STREAM_POST_POLICY_MODERATORS
        and not sender.is_moderator
    ):
        raise JsonableError(
            _("Only organization administrators and moderators can send to this stream.")
        )
    elif stream.stream_post_policy != Stream.STREAM_POST_POLICY_EVERYONE and sender.is_guest:
        raise JsonableError(_("Guests cannot send to this stream."))
    elif (
        stream.stream_post_policy == Stream.STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS
        and sender.is_provisional_member
    ):
        raise JsonableError(_("New members cannot send to this stream."))


def access_stream_for_send_message(
    sender: UserProfile, stream: Stream, forwarder_user_profile: Optional[UserProfile]
) -> None:
    # Our caller is responsible for making sure that `stream` actually
    # matches the realm of the sender.
    try:
        check_stream_access_based_on_stream_post_policy(sender, stream)
    except JsonableError as e:
        if sender.is_bot and sender.bot_owner is not None:
            check_stream_access_based_on_stream_post_policy(sender.bot_owner, stream)
        else:
            raise JsonableError(e.msg)

    # forwarder_user_profile cases should be analyzed first, as incorrect
    # message forging is cause for denying access regardless of any other factors.
    if forwarder_user_profile is not None and forwarder_user_profile != sender:
        if (
            forwarder_user_profile.can_forge_sender
            and forwarder_user_profile.realm_id == sender.realm_id
            and sender.realm_id == stream.realm_id
        ):
            return
        else:
            raise JsonableError(_("User not authorized for this query"))

    if is_cross_realm_bot_email(sender.delivery_email):
        return

    if stream.realm_id != sender.realm_id:
        # Sending to other realm's streams is always disallowed,
        # with the exception of cross-realm bots.
        raise JsonableError(_("User not authorized for this query"))

    if stream.is_web_public:
        # Even guest users can write to web-public streams.
        return

    if not (stream.invite_only or sender.is_guest):
        # This is a public stream and sender is not a guest user
        return

    if subscribed_to_stream(sender, stream.id):
        # It is private, but you are subscribed
        return

    if sender.can_forge_sender:
        # can_forge_sender allows sending to any stream in the realm.
        return

    if sender.is_bot and (
        sender.bot_owner is not None and subscribed_to_stream(sender.bot_owner, stream.id)
    ):
        # Bots can send to any stream their owner can.
        return

    # All other cases are an error.
    raise JsonableError(
        _("Not authorized to send to stream '{stream_name}'").format(stream_name=stream.name)
    )


def check_for_exactly_one_stream_arg(stream_id: Optional[int], stream: Optional[str]) -> None:
    if stream_id is None and stream is None:
        raise JsonableError(_("Please supply 'stream'."))

    if stream_id is not None and stream is not None:
        raise JsonableError(_("Please choose one: 'stream' or 'stream_id'."))


def check_stream_access_for_delete_or_update(
    user_profile: UserProfile, stream: Stream, sub: Optional[Subscription] = None
) -> None:
    error = _("Invalid stream ID")
    if stream.realm_id != user_profile.realm_id:
        raise JsonableError(error)

    if user_profile.is_realm_admin:
        return

    if sub is None and stream.invite_only:
        raise JsonableError(error)

    raise OrganizationAdministratorRequiredError


def access_stream_for_delete_or_update(
    user_profile: UserProfile, stream_id: int
) -> Tuple[Stream, Optional[Subscription]]:
    try:
        stream = Stream.objects.get(id=stream_id)
    except Stream.DoesNotExist:
        raise JsonableError(_("Invalid stream ID"))

    try:
        sub = Subscription.objects.get(
            user_profile=user_profile, recipient=stream.recipient, active=True
        )
    except Subscription.DoesNotExist:
        sub = None

    check_stream_access_for_delete_or_update(user_profile, stream, sub)
    return (stream, sub)


def check_basic_stream_access(
    user_profile: UserProfile,
    stream: Stream,
    sub: Optional[Subscription],
    allow_realm_admin: bool = False,
) -> bool:
    # Any realm user, even guests, can access web_public streams.
    if stream.is_web_public:
        return True

    # If the stream is in your realm and public, you can access it.
    if stream.is_public() and not user_profile.is_guest:
        return True

    # Or if you are subscribed to the stream, you can access it.
    if sub is not None:
        return True

    # For some specific callers (e.g. getting list of subscribers,
    # removing other users from a stream, and updating stream name and
    # description), we allow realm admins to access stream even if
    # they are not subscribed to a private stream.
    if user_profile.is_realm_admin and allow_realm_admin:
        return True

    return False


# Only set allow_realm_admin flag to True when you want to allow realm admin to
# access unsubscribed private stream content.
def access_stream_common(
    user_profile: UserProfile,
    stream: Stream,
    error: str,
    require_active: bool = True,
    allow_realm_admin: bool = False,
) -> Optional[Subscription]:
    """Common function for backend code where the target user attempts to
    access the target stream, returning all the data fetched along the
    way. If that user does not have permission to access that stream,
    we throw an exception. A design goal is that the error message is
    the same for streams you can't access and streams that don't exist."""

    # First, we don't allow any access to streams in other realms.
    if stream.realm_id != user_profile.realm_id:
        # Callers should verify this on their own, so this functions as defensive code.
        raise AssertionError("user_profile and stream realms don't match")

    try:
        assert stream.recipient_id is not None
        sub = Subscription.objects.get(
            user_profile=user_profile, recipient_id=stream.recipient_id, active=require_active
        )
    except Subscription.DoesNotExist:
        sub = None

    if check_basic_stream_access(user_profile, stream, sub, allow_realm_admin=allow_realm_admin):
        return sub

    # Otherwise it is a private stream and you're not on it, so throw
    # an error.
    raise JsonableError(error)


def access_stream_by_id(
    user_profile: UserProfile,
    stream_id: int,
    require_active: bool = True,
    allow_realm_admin: bool = False,
) -> Tuple[Stream, Optional[Subscription]]:
    error = _("Invalid stream ID")
    try:
        stream = get_stream_by_id_in_realm(stream_id, user_profile.realm)
    except Stream.DoesNotExist:
        raise JsonableError(error)

    sub = access_stream_common(
        user_profile,
        stream,
        error,
        require_active=require_active,
        allow_realm_admin=allow_realm_admin,
    )
    return (stream, sub)


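# Illustrative usage (not part of the original module): a view that operates on
# a stream by id typically starts with something like
#
#     (stream, sub) = access_stream_by_id(user_profile, stream_id)
#
# which raises JsonableError("Invalid stream ID") both for streams that do not
# exist and for private streams the user cannot access, so callers cannot
# distinguish the two cases.

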
def get_public_streams_queryset(realm: Realm) -> QuerySet[Stream]:
    return Stream.objects.filter(realm=realm, invite_only=False, history_public_to_subscribers=True)


def get_web_public_streams_queryset(realm: Realm) -> QuerySet[Stream]:
    # This should match the include_web_public code path in do_get_streams.
    return Stream.objects.filter(
        realm=realm,
        is_web_public=True,
        # In theory, nothing conflicts with allowing web-public access
        # to deactivated streams. However, we should offer a way to
        # review archived streams and adjust their settings before
        # allowing that configuration to exist.
        deactivated=False,
        # In theory, is_web_public=True implies invite_only=False and
        # history_public_to_subscribers=True, but it's safer to include
        # these in the query.
        invite_only=False,
        history_public_to_subscribers=True,
    )


def check_stream_name_available(realm: Realm, name: str) -> None:
    check_stream_name(name)
    try:
        get_stream(name, realm)
        raise JsonableError(
            _("Stream name '{stream_name}' is already taken.").format(stream_name=name)
        )
    except Stream.DoesNotExist:
        pass


def access_stream_by_name(
    user_profile: UserProfile, stream_name: str, allow_realm_admin: bool = False
) -> Tuple[Stream, Optional[Subscription]]:
    error = _("Invalid stream name '{stream_name}'").format(stream_name=stream_name)
    try:
        stream = get_realm_stream(stream_name, user_profile.realm_id)
    except Stream.DoesNotExist:
        raise JsonableError(error)

    sub = access_stream_common(
        user_profile,
        stream,
        error,
        allow_realm_admin=allow_realm_admin,
    )
    return (stream, sub)


def access_web_public_stream(stream_id: int, realm: Realm) -> Stream:
    error = _("Invalid stream ID")
    try:
        stream = get_stream_by_id_in_realm(stream_id, realm)
    except Stream.DoesNotExist:
        raise JsonableError(error)

    if not stream.is_web_public:
        raise JsonableError(error)
    return stream


def access_stream_to_remove_visibility_policy_by_name(
    user_profile: UserProfile, stream_name: str, error: str
) -> Stream:
    """
    It may seem a little silly to have this helper function for unmuting
    topics, but it gets around a linter warning, and it helps to be able
    to review all security-related stuff in one place.

    Our policy for accessing streams when you unmute a topic is that you
    don't necessarily need to have an active subscription or even "legal"
    access to the stream. Instead, we just verify the stream_id has been
    muted in the past (not here, but in the caller).

    Long term, we'll probably have folks just pass us in the id of the
    UserTopic row to unmute topics.
    """
    try:
        stream = get_stream(stream_name, user_profile.realm)
    except Stream.DoesNotExist:
        raise JsonableError(error)
    return stream


def access_stream_to_remove_visibility_policy_by_id(
    user_profile: UserProfile, stream_id: int, error: str
) -> Stream:
    try:
        stream = Stream.objects.get(id=stream_id, realm_id=user_profile.realm_id)
    except Stream.DoesNotExist:
        raise JsonableError(error)
    return stream


def private_stream_user_ids(stream_id: int) -> Set[int]:
    subscriptions = get_active_subscriptions_for_stream_id(
        stream_id, include_deactivated_users=False
    )
    return {sub["user_profile_id"] for sub in subscriptions.values("user_profile_id")}


def public_stream_user_ids(stream: Stream) -> Set[int]:
    guest_subscriptions = get_active_subscriptions_for_stream_id(
        stream.id, include_deactivated_users=False
    ).filter(user_profile__role=UserProfile.ROLE_GUEST)
    guest_subscriptions_ids = {
        sub["user_profile_id"] for sub in guest_subscriptions.values("user_profile_id")
    }
    return set(active_non_guest_user_ids(stream.realm_id)) | guest_subscriptions_ids


def can_access_stream_user_ids(stream: Stream) -> Set[int]:
    # return user ids of users who can access the attributes of a
    # stream, such as its name/description. Useful for sending events
    # to all users with access to a stream's attributes.
    if stream.is_public():
        # For a public stream, this is everyone in the realm
        # except unsubscribed guest users
        return public_stream_user_ids(stream)
    else:
        # for a private stream, it's subscribers plus realm admins.
        return private_stream_user_ids(stream.id) | {
            user.id for user in stream.realm.get_admin_users_and_bots()
        }


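# Illustrative note (not part of the original module): this helper computes the
# audience for stream-level events. A caller announcing, say, a description
# change might do something roughly like
#
#     send_event(realm, event, list(can_access_stream_user_ids(stream)))
#
# where `realm`, `event`, and `stream` are assumed to come from the caller's
# context.

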
def can_access_stream_history(user_profile: UserProfile, stream: Stream) -> bool:
    """Determine whether the provided user is allowed to access the
    history of the target stream. The stream is specified by name.

    This is used by the caller to determine whether this user can get
    historical messages before they joined for a narrowing search.

    Because of the way our search is currently structured,
    we may be passed an invalid stream here. We return
    False in that situation, and subsequent code will do
    validation and raise the appropriate JsonableError.

    Note that this function should only be used in contexts where
    access_stream is being called elsewhere to confirm that the user
    can actually see this stream.
    """

    if user_profile.realm_id != stream.realm_id:
        raise AssertionError("user_profile and stream realms don't match")

    if stream.is_web_public:
        return True

    if stream.is_history_realm_public() and not user_profile.is_guest:
        return True

    if stream.is_history_public_to_subscribers():
        # In this case, we check if the user is subscribed.
        error = _("Invalid stream name '{stream_name}'").format(stream_name=stream.name)
        try:
            access_stream_common(user_profile, stream, error)
        except JsonableError:
            return False
        return True
    return False


def can_access_stream_history_by_name(user_profile: UserProfile, stream_name: str) -> bool:
    try:
        stream = get_stream(stream_name, user_profile.realm)
    except Stream.DoesNotExist:
        return False
    return can_access_stream_history(user_profile, stream)


def can_access_stream_history_by_id(user_profile: UserProfile, stream_id: int) -> bool:
    try:
        stream = get_stream_by_id_in_realm(stream_id, user_profile.realm)
    except Stream.DoesNotExist:
        return False
    return can_access_stream_history(user_profile, stream)


def can_remove_subscribers_from_stream(
    stream: Stream, user_profile: UserProfile, sub: Optional[Subscription]
) -> bool:
    if not check_basic_stream_access(user_profile, stream, sub, allow_realm_admin=True):
        return False

    group_allowed_to_remove_subscribers = stream.can_remove_subscribers_group
    assert group_allowed_to_remove_subscribers is not None
    return is_user_in_group(group_allowed_to_remove_subscribers, user_profile)


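# Illustrative note (not part of the original module): the `sub` argument is the
# caller's own Subscription row (or None), typically obtained via
# access_stream_by_id or access_stream_common, e.g.
#
#     (stream, sub) = access_stream_by_id(user_profile, stream_id, allow_realm_admin=True)
#     if not can_remove_subscribers_from_stream(stream, user_profile, sub):
#         raise JsonableError(_("Insufficient permission"))

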
def filter_stream_authorization(
    user_profile: UserProfile, streams: Collection[Stream]
) -> Tuple[List[Stream], List[Stream]]:
    recipient_ids = [stream.recipient_id for stream in streams]
    subscribed_recipient_ids = set(
        Subscription.objects.filter(
            user_profile=user_profile, recipient_id__in=recipient_ids, active=True
        ).values_list("recipient_id", flat=True)
    )

    unauthorized_streams: List[Stream] = []
    for stream in streams:
        # The user is authorized for their own streams
        if stream.recipient_id in subscribed_recipient_ids:
            continue

        # Web-public streams are accessible even to guests
        if stream.is_web_public:
            continue

        # Members and administrators are authorized for public streams
        if not stream.invite_only and not user_profile.is_guest:
            continue

        unauthorized_streams.append(stream)

    authorized_streams = [
        stream
        for stream in streams
        if stream.id not in {stream.id for stream in unauthorized_streams}
    ]
    return authorized_streams, unauthorized_streams


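# Illustrative usage (not part of the original module): bulk-subscribe code can
# split a requested set of streams into those this user may subscribe to and
# those they may not:
#
#     authorized_streams, unauthorized_streams = filter_stream_authorization(
#         user_profile, existing_streams
#     )
#
# Unauthorized streams here are private streams the user is not subscribed to;
# web-public streams, and public streams for non-guests, always pass the filter.

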
def list_to_streams(
    streams_raw: Collection[StreamDict],
    user_profile: UserProfile,
    autocreate: bool = False,
    unsubscribing_others: bool = False,
    is_default_stream: bool = False,
) -> Tuple[List[Stream], List[Stream]]:
    """Converts list of dicts to a list of Streams, validating input in the process

    For each stream name, we validate it to ensure it meets our
    requirements for a proper stream name using check_stream_name.

    This function in autocreate mode should be atomic: either an exception will be raised
    during a precheck, or all the streams specified will have been created if applicable.

    @param streams_raw The list of stream dictionaries to process;
      names should already be stripped of whitespace by the caller.
    @param user_profile The user for whom we are retrieving the streams
    @param autocreate Whether we should create streams if they don't already exist
    """
    # Validate all streams, getting extant ones, then get-or-creating the rest.

    stream_set = {stream_dict["name"] for stream_dict in streams_raw}

    for stream_name in stream_set:
        # Stream names should already have been stripped by the
        # caller, but it makes sense to verify anyway.
        assert stream_name == stream_name.strip()
        check_stream_name(stream_name)

    existing_streams: List[Stream] = []
    missing_stream_dicts: List[StreamDict] = []
    existing_stream_map = bulk_get_streams(user_profile.realm, stream_set)

    if unsubscribing_others:
        existing_recipient_ids = [stream.recipient_id for stream in existing_stream_map.values()]
        subs = Subscription.objects.filter(
            user_profile=user_profile, recipient_id__in=existing_recipient_ids, active=True
        )
        sub_map = {sub.recipient_id: sub for sub in subs}
        for stream in existing_stream_map.values():
            sub = sub_map.get(stream.recipient_id, None)
            if not can_remove_subscribers_from_stream(stream, user_profile, sub):
                raise JsonableError(_("Insufficient permission"))

    message_retention_days_not_none = False
    web_public_stream_requested = False
    for stream_dict in streams_raw:
        stream_name = stream_dict["name"]
        stream = existing_stream_map.get(stream_name.lower())
        if stream is None:
            if stream_dict.get("message_retention_days", None) is not None:
                message_retention_days_not_none = True
            missing_stream_dicts.append(stream_dict)

            if autocreate and stream_dict["is_web_public"]:
                web_public_stream_requested = True
        else:
            existing_streams.append(stream)

    if len(missing_stream_dicts) == 0:
        # This is the happy path for callers who expected all of these
        # streams to exist already.
        created_streams: List[Stream] = []
    else:
        # autocreate=True path starts here
        for stream_dict in missing_stream_dicts:
            invite_only = stream_dict.get("invite_only", False)
            if invite_only and not user_profile.can_create_private_streams():
                raise JsonableError(_("Insufficient permission"))
            if not invite_only and not user_profile.can_create_public_streams():
                raise JsonableError(_("Insufficient permission"))
            if is_default_stream and not user_profile.is_realm_admin:
                raise JsonableError(_("Insufficient permission"))
            if invite_only and is_default_stream:
                raise JsonableError(_("A default stream cannot be private."))

        if not autocreate:
            raise JsonableError(
                _("Stream(s) ({stream_names}) do not exist").format(
                    stream_names=", ".join(
                        stream_dict["name"] for stream_dict in missing_stream_dicts
                    ),
                )
            )

        if web_public_stream_requested:
            if not user_profile.realm.web_public_streams_enabled():
                raise JsonableError(_("Web-public streams are not enabled."))
            if not user_profile.can_create_web_public_streams():
                # We set create_web_public_stream_policy to allow only organization owners
                # to create web-public streams, because of their sensitive nature.
                raise JsonableError(_("Insufficient permission"))

        if message_retention_days_not_none:
            if not user_profile.is_realm_owner:
                raise OrganizationOwnerRequiredError

            user_profile.realm.ensure_not_on_limited_plan()

        # We already filtered out existing streams, so dup_streams
        # will normally be an empty list below, but we protect against somebody
        # else racing to create the same stream. (This is not an entirely
        # paranoid approach, since often on Zulip two people will discuss
        # creating a new stream, and both people eagerly do it.)
        created_streams, dup_streams = create_streams_if_needed(
            realm=user_profile.realm, stream_dicts=missing_stream_dicts, acting_user=user_profile
        )
        existing_streams += dup_streams

    return existing_streams, created_streams


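# Illustrative usage (not part of the original module): subscription handling
# code can use this to resolve a request payload into Stream objects, e.g.
#
#     existing_streams, created_streams = list_to_streams(
#         streams_raw, user_profile, autocreate=True
#     )
#
# With autocreate=False, any requested name that does not already exist causes
# a "Stream(s) (...) do not exist" error instead of creating anything.

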
def access_default_stream_group_by_id(realm: Realm, group_id: int) -> DefaultStreamGroup:
    try:
        return DefaultStreamGroup.objects.get(realm=realm, id=group_id)
    except DefaultStreamGroup.DoesNotExist:
        raise JsonableError(
            _("Default stream group with id '{group_id}' does not exist.").format(group_id=group_id)
        )


def get_stream_by_narrow_operand_access_unchecked(operand: Union[str, int], realm: Realm) -> Stream:
    """This is required over access_stream_* in certain cases where
    we need the stream data only to prepare a response that user can access
    and not send it out to unauthorized recipients.
    """
    if isinstance(operand, str):
        return get_stream(operand, realm)
    return get_stream_by_id_in_realm(operand, realm)


def get_signups_stream(realm: Realm) -> Stream:
    # This one-liner helps us work around a lint rule.
    return get_stream("signups", realm)


def ensure_stream(
    realm: Realm,
    stream_name: str,
    invite_only: bool = False,
    stream_description: str = "",
    *,
    acting_user: Optional[UserProfile],
) -> Stream:
    return create_stream_if_needed(
        realm,
        stream_name,
        invite_only=invite_only,
        stream_description=stream_description,
        acting_user=acting_user,
    )[0]


def get_occupied_streams(realm: Realm) -> QuerySet[Stream]:
    """Get streams with subscribers"""
    exists_expression = Exists(
        Subscription.objects.filter(
            active=True,
            is_user_active=True,
            user_profile__realm=realm,
            recipient_id=OuterRef("recipient_id"),
        ),
    )
    occupied_streams = (
        Stream.objects.filter(realm=realm, deactivated=False)
        .annotate(occupied=exists_expression)
        .filter(occupied=True)
    )
    return occupied_streams


def stream_to_dict(
    stream: Stream, recent_traffic: Optional[Dict[int, int]] = None
) -> APIStreamDict:
    if recent_traffic is not None:
        stream_weekly_traffic = get_average_weekly_stream_traffic(
            stream.id, stream.date_created, recent_traffic
        )
    else:
        # We cannot compute the traffic data for a newly created
        # stream, so we set "stream_weekly_traffic" field to
        # "None" for the stream object in creation event.
        # Also, there are some cases where we do not need to send
        # the traffic data, like when deactivating a stream, and
        # passing stream data to spectators.
        stream_weekly_traffic = None

    return APIStreamDict(
        can_remove_subscribers_group=stream.can_remove_subscribers_group_id,
        date_created=datetime_to_timestamp(stream.date_created),
        description=stream.description,
        first_message_id=stream.first_message_id,
        history_public_to_subscribers=stream.history_public_to_subscribers,
        invite_only=stream.invite_only,
        is_web_public=stream.is_web_public,
        message_retention_days=stream.message_retention_days,
        name=stream.name,
        rendered_description=stream.rendered_description,
        stream_id=stream.id,
        stream_post_policy=stream.stream_post_policy,
        is_announcement_only=stream.stream_post_policy == Stream.STREAM_POST_POLICY_ADMINS,
        stream_weekly_traffic=stream_weekly_traffic,
    )


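# Illustrative note (not part of the original module): this helper builds the
# stream payload used in several API responses and events. For example,
# send_stream_creation_event above does roughly
#
#     stream_to_dict(stream, recent_traffic)
#
# passing recent_traffic=None when traffic data is unavailable or not needed,
# which makes the resulting "stream_weekly_traffic" field None.

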
def get_web_public_streams(realm: Realm) -> List[APIStreamDict]:  # nocoverage
    query = get_web_public_streams_queryset(realm)
    streams = query.only(*Stream.API_FIELDS)
    stream_dicts = [stream_to_dict(stream) for stream in streams]
    return stream_dicts


def get_streams_for_user(
    user_profile: UserProfile,
    include_public: bool = True,
    include_web_public: bool = False,
    include_subscribed: bool = True,
    include_all_active: bool = False,
    include_owner_subscribed: bool = False,
) -> List[Stream]:
    if include_all_active and not user_profile.is_realm_admin:
        raise JsonableError(_("User not authorized for this query"))

    include_public = include_public and user_profile.can_access_public_streams()

    # Start out with all active streams in the realm.
    query = Stream.objects.filter(realm=user_profile.realm, deactivated=False)

    if include_all_active:
        streams = query.only(*Stream.API_FIELDS)
    else:
        # We construct a query as the or (|) of the various sources
        # this user requested streams from.
        query_filter: Optional[Q] = None

        def add_filter_option(option: Q) -> None:
            nonlocal query_filter
            if query_filter is None:
                query_filter = option
            else:
                query_filter |= option

        if include_subscribed:
            subscribed_stream_ids = get_subscribed_stream_ids_for_user(user_profile)
            recipient_check = Q(id__in=set(subscribed_stream_ids))
            add_filter_option(recipient_check)
        if include_public:
            invite_only_check = Q(invite_only=False)
            add_filter_option(invite_only_check)
        if include_web_public:
            # This should match get_web_public_streams_queryset
            web_public_check = Q(
                is_web_public=True,
                invite_only=False,
                history_public_to_subscribers=True,
                deactivated=False,
            )
            add_filter_option(web_public_check)
        if include_owner_subscribed and user_profile.is_bot:
            bot_owner = user_profile.bot_owner
            assert bot_owner is not None
            owner_stream_ids = get_subscribed_stream_ids_for_user(bot_owner)
            owner_subscribed_check = Q(id__in=set(owner_stream_ids))
            add_filter_option(owner_subscribed_check)

        if query_filter is not None:
            query = query.filter(query_filter)
            streams = query.only(*Stream.API_FIELDS)
        else:
            # Don't bother going to the database with no valid sources
            return []

    return list(streams)


def do_get_streams(
    user_profile: UserProfile,
    include_public: bool = True,
    include_web_public: bool = False,
    include_subscribed: bool = True,
    include_all_active: bool = False,
    include_default: bool = False,
    include_owner_subscribed: bool = False,
) -> List[APIStreamDict]:
    # This function is only used by API clients now.

    streams = get_streams_for_user(
        user_profile,
        include_public,
        include_web_public,
        include_subscribed,
        include_all_active,
        include_owner_subscribed,
    )

    stream_ids = {stream.id for stream in streams}
    recent_traffic = get_streams_traffic(stream_ids, user_profile.realm)

    stream_dicts = sorted(
        (stream_to_dict(stream, recent_traffic) for stream in streams), key=lambda elt: elt["name"]
    )

    if include_default:
        default_stream_ids = get_default_stream_ids_for_realm(user_profile.realm_id)
        for stream in stream_dicts:
            stream["is_default"] = stream["stream_id"] in default_stream_ids

    return stream_dicts


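# Illustrative usage (not part of the original module): this is the entry point
# used by API clients fetching streams; a typical call is
#
#     stream_dicts = do_get_streams(user_profile, include_public=True, include_subscribed=True)
#
# which returns APIStreamDict payloads sorted by stream name, with an extra
# "is_default" key only when include_default=True is requested.

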
def get_subscribed_private_streams_for_user(user_profile: UserProfile) -> QuerySet[Stream]:
    exists_expression = Exists(
        Subscription.objects.filter(
            user_profile=user_profile,
            active=True,
            is_user_active=True,
            recipient_id=OuterRef("recipient_id"),
        ),
    )
    subscribed_private_streams = (
        Stream.objects.filter(realm=user_profile.realm, invite_only=True, deactivated=False)
        .annotate(subscribed=exists_expression)
        .filter(subscribed=True)
    )
    return subscribed_private_streams