from typing import Any, Iterable, List, Mapping, Optional, Set, Tuple, Union

from django.conf import settings
from django.db.models.query import QuerySet
from django.utils.translation import ugettext as _

from zerver.lib.markdown import convert as markdown_convert
from zerver.lib.request import JsonableError
from zerver.models import (
    DefaultStreamGroup,
    Realm,
    Recipient,
    Stream,
    Subscription,
    UserProfile,
    active_non_guest_user_ids,
    bulk_get_streams,
    get_realm_stream,
    get_stream,
    get_stream_by_id_in_realm,
    is_cross_realm_bot_email,
)
from zerver.tornado.event_queue import send_event


def get_default_value_for_history_public_to_subscribers(
        realm: Realm,
        invite_only: bool,
        history_public_to_subscribers: Optional[bool],
) -> bool:
    if invite_only:
        if history_public_to_subscribers is None:
            # A private stream's history is non-public by default
            history_public_to_subscribers = False
    else:
        # If we later decide to support public streams without
        # history, we can remove this code path.
        history_public_to_subscribers = True

    if realm.is_zephyr_mirror_realm:
        # In the Zephyr mirroring model, history is unconditionally
        # not public to subscribers, even for public streams.
        history_public_to_subscribers = False

    return history_public_to_subscribers
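
# Worked example (illustrative only, never executed): for a hypothetical
# `realm` that is not a Zephyr mirror realm, the defaulting above gives:
#
#     get_default_value_for_history_public_to_subscribers(realm, True, None)    # -> False
#     get_default_value_for_history_public_to_subscribers(realm, True, True)    # -> True
#     get_default_value_for_history_public_to_subscribers(realm, False, False)  # -> True
#
# If realm.is_zephyr_mirror_realm were True, every call would return False.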


def render_stream_description(text: str) -> str:
    return markdown_convert(text, no_previews=True)


def send_stream_creation_event(stream: Stream, user_ids: List[int]) -> None:
    event = dict(type="stream", op="create",
                 streams=[stream.to_dict()])
    send_event(stream.realm, event, user_ids)


def create_stream_if_needed(realm: Realm,
                            stream_name: str,
                            *,
                            invite_only: bool=False,
                            stream_post_policy: int=Stream.STREAM_POST_POLICY_EVERYONE,
                            history_public_to_subscribers: Optional[bool]=None,
                            stream_description: str="",
                            message_retention_days: Optional[int]=None) -> Tuple[Stream, bool]:
    history_public_to_subscribers = get_default_value_for_history_public_to_subscribers(
        realm, invite_only, history_public_to_subscribers)

    (stream, created) = Stream.objects.get_or_create(
        realm=realm,
        name__iexact=stream_name,
        defaults=dict(
            name=stream_name,
            description=stream_description,
            invite_only=invite_only,
            stream_post_policy=stream_post_policy,
            history_public_to_subscribers=history_public_to_subscribers,
            is_in_zephyr_realm=realm.is_zephyr_mirror_realm,
            message_retention_days=message_retention_days,
        ),
    )

    if created:
        recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)

        stream.recipient = recipient
        stream.rendered_description = render_stream_description(stream_description)
        stream.save(update_fields=["recipient", "rendered_description"])

        if stream.is_public():
            send_stream_creation_event(stream, active_non_guest_user_ids(stream.realm_id))
        else:
            realm_admin_ids = [user.id for user in
                               stream.realm.get_admin_users_and_bots()]
            send_stream_creation_event(stream, realm_admin_ids)
    return stream, created
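
# Illustrative usage sketch (the stream name and description below are made
# up): callers typically create a stream and then act on the `created` flag.
#
#     stream, created = create_stream_if_needed(
#         realm,
#         "core team",
#         invite_only=True,
#         stream_description="Discussions for the core team.",
#     )
#     # `created` is False when a stream with this name already exists
#     # (the lookup is case-insensitive); the existing Stream is returned
#     # unchanged in that case.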


def create_streams_if_needed(realm: Realm,
                             stream_dicts: List[Mapping[str, Any]]) -> Tuple[List[Stream], List[Stream]]:
    """Note that stream_dict["name"] is assumed to already be stripped of
    whitespace"""
    added_streams: List[Stream] = []
    existing_streams: List[Stream] = []
    for stream_dict in stream_dicts:
        stream, created = create_stream_if_needed(
            realm,
            stream_dict["name"],
            invite_only=stream_dict.get("invite_only", False),
            stream_post_policy=stream_dict.get("stream_post_policy", Stream.STREAM_POST_POLICY_EVERYONE),
            history_public_to_subscribers=stream_dict.get("history_public_to_subscribers"),
            stream_description=stream_dict.get("description", ""),
            message_retention_days=stream_dict.get("message_retention_days", None),
        )

        if created:
            added_streams.append(stream)
        else:
            existing_streams.append(stream)

    return added_streams, existing_streams
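
# Illustrative sketch of the accepted dict shape (values are made up; keys
# other than "name" are optional and fall back to the defaults above):
#
#     create_streams_if_needed(realm, [
#         {"name": "design", "description": "UI/UX work"},
#         {"name": "security", "invite_only": True,
#          "history_public_to_subscribers": False},
#     ])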


def check_stream_name(stream_name: str) -> None:
    if stream_name.strip() == "":
        raise JsonableError(_("Invalid stream name '{}'").format(stream_name))
    if len(stream_name) > Stream.MAX_NAME_LENGTH:
        raise JsonableError(_("Stream name too long (limit: {} characters).").format(Stream.MAX_NAME_LENGTH))
    for i in stream_name:
        if ord(i) == 0:
            raise JsonableError(_("Stream name '{}' contains NULL (0x00) characters.").format(stream_name))


def subscribed_to_stream(user_profile: UserProfile, stream_id: int) -> bool:
    return Subscription.objects.filter(
        user_profile=user_profile,
        active=True,
        recipient__type=Recipient.STREAM,
        recipient__type_id=stream_id).exists()


def access_stream_for_send_message(sender: UserProfile,
                                   stream: Stream,
                                   forwarder_user_profile: Optional[UserProfile]) -> None:
    # Our caller is responsible for making sure that `stream` actually
    # matches the realm of the sender.

    # Organization admins can send to any stream, irrespective of the stream_post_policy value.
    if sender.is_realm_admin or is_cross_realm_bot_email(sender.delivery_email):
        pass
    elif sender.is_bot and (sender.bot_owner is not None and
                            sender.bot_owner.is_realm_admin):
        pass
    elif stream.stream_post_policy == Stream.STREAM_POST_POLICY_ADMINS:
        raise JsonableError(_("Only organization administrators can send to this stream."))
    elif stream.stream_post_policy == Stream.STREAM_POST_POLICY_RESTRICT_NEW_MEMBERS:
        if sender.is_bot and (sender.bot_owner is not None and
                              sender.bot_owner.is_new_member):
            raise JsonableError(_("New members cannot send to this stream."))
        elif sender.is_new_member:
            raise JsonableError(_("New members cannot send to this stream."))

    if not (stream.invite_only or sender.is_guest):
        # This is a public stream and sender is not a guest user
        return

    if subscribed_to_stream(sender, stream.id):
        # It is private, but you are subscribed
        return

    if sender.is_api_super_user:
        return

    if (forwarder_user_profile is not None and forwarder_user_profile.is_api_super_user):
        return

    if sender.is_bot and (sender.bot_owner is not None and
                          subscribed_to_stream(sender.bot_owner, stream.id)):
        # Bots can send to any stream their owner can.
        return

    if sender.delivery_email == settings.WELCOME_BOT:
        # The welcome bot welcomes folks to the stream.
        return

    if sender.delivery_email == settings.NOTIFICATION_BOT:
        return

    # All other cases are an error.
    raise JsonableError(_("Not authorized to send to stream '{}'").format(stream.name))


def check_for_exactly_one_stream_arg(stream_id: Optional[int], stream: Optional[str]) -> None:
    if stream_id is None and stream is None:
        raise JsonableError(_("Please supply 'stream'."))

    if stream_id is not None and stream is not None:
        raise JsonableError(_("Please choose one: 'stream' or 'stream_id'."))


def access_stream_for_delete_or_update(user_profile: UserProfile, stream_id: int) -> Stream:
    # We should only ever use this for realm admins, who are allowed
    # to delete or update all streams on their realm, even private streams
    # to which they are not subscribed.  We do an assert here, because
    # all callers should have the require_realm_admin decorator.
    assert user_profile.is_realm_admin

    error = _("Invalid stream id")
    try:
        stream = Stream.objects.get(id=stream_id)
    except Stream.DoesNotExist:
        raise JsonableError(error)

    if stream.realm_id != user_profile.realm_id:
        raise JsonableError(error)

    return stream


# Only set allow_realm_admin flag to True when you want to allow realm admin to
# access unsubscribed private stream content.
def access_stream_common(user_profile: UserProfile, stream: Stream,
                         error: str,
                         require_active: bool=True,
                         allow_realm_admin: bool=False) -> Tuple[Recipient, Optional[Subscription]]:
    """Common function for backend code where the target user attempts to
    access the target stream, returning all the data fetched along the
    way.  If that user does not have permission to access that stream,
    we throw an exception.  A design goal is that the error message is
    the same for streams you can't access and streams that don't exist."""

    # First, we don't allow any access to streams in other realms.
    if stream.realm_id != user_profile.realm_id:
        raise JsonableError(error)

    recipient = stream.recipient

    try:
        sub = Subscription.objects.get(user_profile=user_profile,
                                       recipient=recipient,
                                       active=require_active)
    except Subscription.DoesNotExist:
        sub = None

    # If the stream is in your realm and public, you can access it.
    if stream.is_public() and not user_profile.is_guest:
        return (recipient, sub)

    # Or if you are subscribed to the stream, you can access it.
    if sub is not None:
        return (recipient, sub)

    # For some specific callers (e.g. getting list of subscribers,
    # removing other users from a stream, and updating stream name and
    # description), we allow realm admins to access streams even if
    # they are not subscribed to a private stream.
    if user_profile.is_realm_admin and allow_realm_admin:
        return (recipient, sub)

    # Otherwise it is a private stream and you're not on it, so throw
    # an error.
    raise JsonableError(error)
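
# Illustrative sketch: most callers go through the by-id/by-name wrappers
# defined below rather than calling access_stream_common() directly, e.g.
# (the stream_id value is hypothetical):
#
#     stream, recipient, sub = access_stream_by_id(user_profile, stream_id=42)
#     # Raises JsonableError("Invalid stream id") if the stream does not exist
#     # or the user may not access it; `sub` is None when the user can access
#     # the stream without being subscribed (e.g. a public stream).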


def access_stream_by_id(user_profile: UserProfile,
                        stream_id: int,
                        require_active: bool=True,
                        allow_realm_admin: bool=False) -> Tuple[Stream, Recipient, Optional[Subscription]]:
    stream = get_stream_by_id(stream_id)

    error = _("Invalid stream id")
    (recipient, sub) = access_stream_common(user_profile, stream, error,
                                            require_active=require_active,
                                            allow_realm_admin=allow_realm_admin)
    return (stream, recipient, sub)


def get_public_streams_queryset(realm: Realm) -> 'QuerySet[Stream]':
    return Stream.objects.filter(realm=realm, invite_only=False,
                                 history_public_to_subscribers=True)


def get_stream_by_id(stream_id: int) -> Stream:
    error = _("Invalid stream id")
    try:
        stream = Stream.objects.get(id=stream_id)
    except Stream.DoesNotExist:
        raise JsonableError(error)
    return stream


def check_stream_name_available(realm: Realm, name: str) -> None:
    check_stream_name(name)
    try:
        get_stream(name, realm)
        raise JsonableError(_("Stream name '{}' is already taken.").format(name))
    except Stream.DoesNotExist:
        pass


def access_stream_by_name(user_profile: UserProfile,
                          stream_name: str,
                          allow_realm_admin: bool=False) -> Tuple[Stream, Recipient, Optional[Subscription]]:
    error = _("Invalid stream name '{}'").format(stream_name)
    try:
        stream = get_realm_stream(stream_name, user_profile.realm_id)
    except Stream.DoesNotExist:
        raise JsonableError(error)

    (recipient, sub) = access_stream_common(user_profile, stream, error,
                                            allow_realm_admin=allow_realm_admin)
    return (stream, recipient, sub)


def access_stream_for_unmute_topic_by_name(user_profile: UserProfile,
                                           stream_name: str,
                                           error: str) -> Stream:
    """
    It may seem a little silly to have this helper function for unmuting
    topics, but it gets around a linter warning, and it helps to be able
    to review all security-related stuff in one place.

    Our policy for accessing streams when you unmute a topic is that you
    don't necessarily need to have an active subscription or even "legal"
    access to the stream.  Instead, we just verify the stream_id has been
    muted in the past (not here, but in the caller).

    Long term, we'll probably have folks just pass us in the id of the
    MutedTopic row to unmute topics.
    """
    try:
        stream = get_stream(stream_name, user_profile.realm)
    except Stream.DoesNotExist:
        raise JsonableError(error)
    return stream


def access_stream_for_unmute_topic_by_id(user_profile: UserProfile,
                                         stream_id: int,
                                         error: str) -> Stream:
    try:
        stream = Stream.objects.get(id=stream_id, realm_id=user_profile.realm_id)
    except Stream.DoesNotExist:
        raise JsonableError(error)
    return stream


def can_access_stream_history(user_profile: UserProfile, stream: Stream) -> bool:
    """Determine whether the provided user is allowed to access the
    history of the target stream.  The target stream is passed as a
    Stream object.

    This is used by the caller to determine whether this user can get
    historical messages before they joined for a narrowing search.

    Because of the way our search is currently structured,
    we may be passed an invalid stream here.  We return
    False in that situation, and subsequent code will do
    validation and raise the appropriate JsonableError.

    Note that this function should only be used in contexts where
    access_stream is being called elsewhere to confirm that the user
    can actually see this stream.
    """
    if stream.is_history_realm_public() and not user_profile.is_guest:
        return True

    if stream.is_history_public_to_subscribers():
        # In this case, we check if the user is subscribed.
        error = _("Invalid stream name '{}'").format(stream.name)
        try:
            (recipient, sub) = access_stream_common(user_profile, stream, error)
        except JsonableError:
            return False
        return True
    return False
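
# Illustrative sketch (hypothetical caller): narrow/search code paths use
# these checks to decide whether to include messages sent before the user
# joined, e.g.
#
#     include_history = can_access_stream_history_by_name(user_profile, "design")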


def can_access_stream_history_by_name(user_profile: UserProfile, stream_name: str) -> bool:
    try:
        stream = get_stream(stream_name, user_profile.realm)
    except Stream.DoesNotExist:
        return False
    return can_access_stream_history(user_profile, stream)


def can_access_stream_history_by_id(user_profile: UserProfile, stream_id: int) -> bool:
    try:
        stream = get_stream_by_id_in_realm(stream_id, user_profile.realm)
    except Stream.DoesNotExist:
        return False
    return can_access_stream_history(user_profile, stream)


def filter_stream_authorization(user_profile: UserProfile,
                                streams: Iterable[Stream]) -> Tuple[List[Stream], List[Stream]]:
    streams_subscribed: Set[int] = set()
    recipient_ids = [stream.recipient_id for stream in streams]
    subs = Subscription.objects.filter(user_profile=user_profile,
                                       recipient_id__in=recipient_ids,
                                       active=True)

    for sub in subs:
        streams_subscribed.add(sub.recipient.type_id)

    unauthorized_streams: List[Stream] = []
    for stream in streams:
        # The user is authorized for their own streams
        if stream.id in streams_subscribed:
            continue

        # Users are not authorized for invite_only streams, and guest
        # users are not authorized for any streams
        if stream.invite_only or user_profile.is_guest:
            unauthorized_streams.append(stream)

    authorized_streams = [stream for stream in streams if
                          stream.id not in {stream.id for stream in unauthorized_streams}]
    return authorized_streams, unauthorized_streams
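
# Illustrative sketch (hypothetical input): given a mix of subscribed, public,
# and invite-only streams,
#
#     authorized, unauthorized = filter_stream_authorization(user_profile, streams)
#
# puts invite-only streams the user is not subscribed to (and, for guests,
# every unsubscribed stream) into `unauthorized`, and everything else into
# `authorized`, preserving the input order.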


def list_to_streams(streams_raw: Iterable[Mapping[str, Any]],
                    user_profile: UserProfile,
                    autocreate: bool=False) -> Tuple[List[Stream], List[Stream]]:
    """Converts list of dicts to a list of Streams, validating input in the process

    For each stream name, we validate it to ensure it meets our
    requirements for a proper stream name using check_stream_name.

    This function in autocreate mode should be atomic: either an exception will be raised
    during a precheck, or all the streams specified will have been created if applicable.

    @param streams_raw The list of stream dictionaries to process;
      names should already be stripped of whitespace by the caller.
    @param user_profile The user for whom we are retrieving the streams
    @param autocreate Whether we should create streams if they don't already exist
    """
    # Validate all streams, getting extant ones, then get-or-creating the rest.

    stream_set = {stream_dict["name"] for stream_dict in streams_raw}

    for stream_name in stream_set:
        # Stream names should already have been stripped by the
        # caller, but it makes sense to verify anyway.
        assert stream_name == stream_name.strip()
        check_stream_name(stream_name)

    existing_streams: List[Stream] = []
    missing_stream_dicts: List[Mapping[str, Any]] = []
    existing_stream_map = bulk_get_streams(user_profile.realm, stream_set)

    message_retention_days_not_none = False
    for stream_dict in streams_raw:
        stream_name = stream_dict["name"]
        stream = existing_stream_map.get(stream_name.lower())
        if stream is None:
            if stream_dict.get('message_retention_days', None) is not None:
                message_retention_days_not_none = True
            missing_stream_dicts.append(stream_dict)
        else:
            existing_streams.append(stream)

    if len(missing_stream_dicts) == 0:
        # This is the happy path for callers who expected all of these
        # streams to exist already.
        created_streams: List[Stream] = []
    else:
        # autocreate=True path starts here
        if not user_profile.can_create_streams():
            raise JsonableError(_('User cannot create streams.'))
        elif not autocreate:
            raise JsonableError(_("Stream(s) ({}) do not exist").format(
                ", ".join(stream_dict["name"] for stream_dict in missing_stream_dicts),
            ))
        elif message_retention_days_not_none:
            if not user_profile.is_realm_owner:
                raise JsonableError(_('User cannot create streams with these settings.'))
            user_profile.realm.ensure_not_on_limited_plan()

        # We already filtered out existing streams, so dup_streams
        # will normally be an empty list below, but we protect against somebody
        # else racing to create the same stream.  (This is not an entirely
        # paranoid approach, since often on Zulip two people will discuss
        # creating a new stream, and both people eagerly do it.)
        created_streams, dup_streams = create_streams_if_needed(realm=user_profile.realm,
                                                                stream_dicts=missing_stream_dicts)
        existing_streams += dup_streams

    return existing_streams, created_streams
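
# Illustrative usage sketch (hypothetical input): a caller such as a subscribe
# endpoint might pass its parsed payload through this function, e.g.
#
#     existing, created = list_to_streams(
#         [{"name": "design"}, {"name": "new-project", "invite_only": True}],
#         user_profile,
#         autocreate=True,
#     )
#     # With autocreate=False, any name that does not already exist raises
#     # JsonableError instead of being created.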


def access_default_stream_group_by_id(realm: Realm, group_id: int) -> DefaultStreamGroup:
    try:
        return DefaultStreamGroup.objects.get(realm=realm, id=group_id)
    except DefaultStreamGroup.DoesNotExist:
        raise JsonableError(_("Default stream group with id '{}' does not exist.").format(group_id))


def get_stream_by_narrow_operand_access_unchecked(operand: Union[str, int], realm: Realm) -> Stream:
    """This is required over access_stream_* in certain cases where
    we need the stream data only to prepare a response that the user can
    access, without sending it out to unauthorized recipients.
    """
    if isinstance(operand, str):
        return get_stream(operand, realm)
    return get_stream_by_id_in_realm(operand, realm)
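
# Illustrative sketch: the narrow operand may be either a stream name or a
# stream id (the values below are made up):
#
#     get_stream_by_narrow_operand_access_unchecked("design", realm)  # by name
#     get_stream_by_narrow_operand_access_unchecked(42, realm)        # by id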