2020-06-11 00:54:34 +02:00
|
|
|
from collections import defaultdict
|
2021-04-30 00:15:33 +02:00
|
|
|
from typing import Any, Callable, Dict, List, Mapping, Optional, Sequence, Set, Union
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2020-08-07 01:09:47 +02:00
|
|
|
import orjson
|
2015-11-23 14:35:16 +01:00
|
|
|
from django.conf import settings
|
2020-08-21 17:12:05 +02:00
|
|
|
from django.contrib.auth.models import AnonymousUser
|
2020-06-21 02:36:20 +02:00
|
|
|
from django.core.exceptions import ValidationError
|
2015-11-23 14:35:16 +01:00
|
|
|
from django.db import transaction
|
2016-05-29 16:41:41 +02:00
|
|
|
from django.http import HttpRequest, HttpResponse
|
2021-04-16 00:57:30 +02:00
|
|
|
from django.utils.translation import gettext as _
|
2020-06-26 15:17:12 +02:00
|
|
|
from django.utils.translation import override as override_language
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2022-04-14 23:34:23 +02:00
|
|
|
from zerver.actions.default_streams import (
|
|
|
|
do_add_default_stream,
|
|
|
|
do_add_streams_to_default_stream_group,
|
|
|
|
do_change_default_stream_group_description,
|
|
|
|
do_change_default_stream_group_name,
|
|
|
|
do_create_default_stream_group,
|
|
|
|
do_remove_default_stream,
|
|
|
|
do_remove_default_stream_group,
|
|
|
|
do_remove_streams_from_default_stream_group,
|
|
|
|
get_default_streams_for_realm,
|
|
|
|
)
|
2022-07-17 13:00:21 +02:00
|
|
|
from zerver.actions.message_delete import do_delete_messages
|
2022-04-14 23:50:10 +02:00
|
|
|
from zerver.actions.message_send import (
|
|
|
|
do_send_messages,
|
|
|
|
internal_prep_private_message,
|
|
|
|
internal_prep_stream_message,
|
|
|
|
)
|
2022-04-14 23:51:16 +02:00
|
|
|
from zerver.actions.streams import (
|
2020-06-11 00:54:34 +02:00
|
|
|
bulk_add_subscriptions,
|
|
|
|
bulk_remove_subscriptions,
|
|
|
|
do_change_stream_description,
|
2020-06-14 18:57:02 +02:00
|
|
|
do_change_stream_message_retention_days,
|
2020-11-10 14:15:04 +01:00
|
|
|
do_change_stream_permission,
|
2020-06-11 00:54:34 +02:00
|
|
|
do_change_stream_post_policy,
|
|
|
|
do_change_subscription_property,
|
|
|
|
do_deactivate_stream,
|
|
|
|
do_rename_stream,
|
2021-07-17 00:29:45 +02:00
|
|
|
get_subscriber_ids,
|
2020-06-11 00:54:34 +02:00
|
|
|
)
|
2022-04-14 23:51:16 +02:00
|
|
|
from zerver.context_processors import get_valid_realm_from_request
|
|
|
|
from zerver.decorator import (
|
|
|
|
authenticated_json_view,
|
|
|
|
require_non_guest_user,
|
|
|
|
require_post,
|
|
|
|
require_realm_admin,
|
|
|
|
)
|
2021-07-04 08:52:23 +02:00
|
|
|
from zerver.lib.exceptions import (
|
|
|
|
ErrorCode,
|
|
|
|
JsonableError,
|
|
|
|
OrganizationOwnerRequired,
|
|
|
|
ResourceNotFoundError,
|
|
|
|
)
|
2021-12-29 17:54:08 +01:00
|
|
|
from zerver.lib.mention import MentionBackend, silent_mention_syntax_for_user
|
2021-08-14 02:28:52 +02:00
|
|
|
from zerver.lib.request import REQ, has_request_variables
|
2021-07-04 08:52:23 +02:00
|
|
|
from zerver.lib.response import json_success
|
2022-09-20 21:24:59 +02:00
|
|
|
from zerver.lib.retention import STREAM_MESSAGE_BATCH_SIZE as RETENTION_STREAM_MESSAGE_BATCH_SIZE
|
2020-06-21 11:14:35 +02:00
|
|
|
from zerver.lib.retention import parse_message_retention_days
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.streams import (
|
2020-09-29 18:06:50 +02:00
|
|
|
StreamDict,
|
2020-06-11 00:54:34 +02:00
|
|
|
access_default_stream_group_by_id,
|
|
|
|
access_stream_by_id,
|
|
|
|
access_stream_by_name,
|
|
|
|
access_stream_for_delete_or_update,
|
2020-08-21 17:12:05 +02:00
|
|
|
access_web_public_stream,
|
2020-06-11 00:54:34 +02:00
|
|
|
check_stream_name_available,
|
2022-04-14 23:42:50 +02:00
|
|
|
do_get_streams,
|
2020-06-11 00:54:34 +02:00
|
|
|
filter_stream_authorization,
|
2022-03-04 22:28:37 +01:00
|
|
|
get_stream_permission_policy_name,
|
2020-06-11 00:54:34 +02:00
|
|
|
list_to_streams,
|
|
|
|
)
|
2022-01-11 21:47:44 +01:00
|
|
|
from zerver.lib.string_validation import check_stream_name
|
2022-04-14 23:45:12 +02:00
|
|
|
from zerver.lib.subscription_info import gather_subscriptions
|
2020-08-21 17:12:05 +02:00
|
|
|
from zerver.lib.topic import (
|
|
|
|
get_topic_history_for_public_stream,
|
|
|
|
get_topic_history_for_stream,
|
|
|
|
messages_for_topic,
|
|
|
|
)
|
2020-06-22 22:37:00 +02:00
|
|
|
from zerver.lib.types import Validator
|
2021-07-25 16:31:12 +02:00
|
|
|
from zerver.lib.utils import assert_is_not_none
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.validator import (
|
|
|
|
check_bool,
|
|
|
|
check_capped_string,
|
|
|
|
check_color,
|
|
|
|
check_dict,
|
|
|
|
check_dict_only,
|
|
|
|
check_int,
|
|
|
|
check_int_in,
|
|
|
|
check_list,
|
|
|
|
check_string,
|
2020-06-14 18:57:02 +02:00
|
|
|
check_string_or_int,
|
2020-06-20 10:37:43 +02:00
|
|
|
check_union,
|
2020-06-11 00:54:34 +02:00
|
|
|
to_non_negative_int,
|
|
|
|
)
|
|
|
|
from zerver.models import (
|
|
|
|
Realm,
|
|
|
|
Stream,
|
|
|
|
UserMessage,
|
|
|
|
UserProfile,
|
|
|
|
get_active_user,
|
|
|
|
get_active_user_profile_by_id_in_realm,
|
|
|
|
get_system_bot,
|
|
|
|
)
|
2015-11-23 14:35:16 +01:00
|
|
|
|
|
|
|
|
|
|
|
class PrincipalError(JsonableError):
    """Raised when a user is not allowed to act on behalf of a "principal".

    A principal is the user (identified by email or user ID) on whose
    behalf an API operation was requested; see principal_to_user_profile.
    Maps to HTTP 403 and echoes the offending principal back to the
    client via the JsonableError data_fields machinery.
    """

    # Machine-readable error code for clients.
    code = ErrorCode.UNAUTHORIZED_PRINCIPAL
    # Instance attributes included in the JSON error payload.
    data_fields = ["principal"]
    # 403 Forbidden: authenticated, but not authorized for this principal.
    http_status_code = 403

    def __init__(self, principal: Union[int, str]) -> None:
        # Email address (str) or user ID (int) the caller tried to act for.
        self.principal: Union[int, str] = principal

    @staticmethod
    def msg_format() -> str:
        # {principal} is interpolated from data_fields by JsonableError.
        return _("User not authorized to execute queries on behalf of '{principal}'")
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-04-09 19:07:57 +02:00
|
|
|
def principal_to_user_profile(agent: UserProfile, principal: Union[str, int]) -> UserProfile:
    """Resolve *principal* (email or user ID) to an active user in *agent*'s realm.

    Raises PrincipalError if no matching active user exists in the
    agent's realm.
    """
    realm = agent.realm
    try:
        if isinstance(principal, str):
            # String principals are email addresses.
            return get_active_user(principal, realm)
        return get_active_user_profile_by_id_in_realm(principal, realm)
    except UserProfile.DoesNotExist:
        # Report the same error whether the principal does not exist at
        # all or is registered only in a different realm, so we don't
        # leak which users exist elsewhere on this server.  We could do
        # something a little more clever with the domain part of an
        # email principal to give a better message, but deliberately
        # don't.
        raise PrincipalError(principal)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-06-27 21:44:12 +02:00
|
|
|
def user_directly_controls_user(user_profile: UserProfile, target: UserProfile) -> bool:
    """Return True if *target* is *user_profile* itself, or a bot owned by
    *user_profile*."""
    # Short-circuit keeps the bot-owner check from running for self-access.
    return user_profile == target or (target.is_bot and target.bot_owner == user_profile)
|
2020-06-05 22:05:13 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def deactivate_stream_backend(
    request: HttpRequest, user_profile: UserProfile, stream_id: int
) -> HttpResponse:
    """Deactivate (archive) a stream, after checking the user may delete
    or update it."""
    stream = access_stream_for_delete_or_update(user_profile, stream_id)[0]
    do_deactivate_stream(stream, acting_user=user_profile)
    return json_success(request)
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2015-11-23 14:35:16 +01:00
|
|
|
@require_realm_admin
@has_request_variables
def add_default_stream(
    request: HttpRequest, user_profile: UserProfile, stream_id: int = REQ(json_validator=check_int)
) -> HttpResponse:
    """Add a stream to the realm's set of default streams."""
    stream = access_stream_by_id(user_profile, stream_id)[0]
    # Invite-only streams are rejected outright.
    if stream.invite_only:
        raise JsonableError(_("Private streams cannot be made default."))
    do_add_default_stream(stream)
    return json_success(request)
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-01 18:20:34 +01:00
|
|
|
@require_realm_admin
@has_request_variables
def create_default_stream_group(
    request: HttpRequest,
    user_profile: UserProfile,
    group_name: str = REQ(),
    description: str = REQ(),
    stream_names: List[str] = REQ(json_validator=check_list(check_string)),
) -> HttpResponse:
    """Create a named default stream group from a list of stream names.

    Each name is access-checked for the requesting user before the
    group is created.
    """
    streams = [access_stream_by_name(user_profile, name)[0] for name in stream_names]
    do_create_default_stream_group(user_profile.realm, group_name, description, streams)
    return json_success(request)
|
2017-11-01 18:20:34 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-01 18:20:34 +01:00
|
|
|
@require_realm_admin
@has_request_variables
def update_default_stream_group_info(
    request: HttpRequest,
    user_profile: UserProfile,
    group_id: int,
    new_group_name: Optional[str] = REQ(default=None),
    new_description: Optional[str] = REQ(default=None),
) -> HttpResponse:
    """Rename a default stream group and/or change its description."""
    # Note: empty strings are treated the same as omitted parameters.
    if not (new_group_name or new_description):
        raise JsonableError(_('You must pass "new_description" or "new_group_name".'))

    stream_group = access_default_stream_group_by_id(user_profile.realm, group_id)
    if new_group_name is not None:
        do_change_default_stream_group_name(user_profile.realm, stream_group, new_group_name)
    if new_description is not None:
        do_change_default_stream_group_description(
            user_profile.realm, stream_group, new_description
        )
    return json_success(request)
|
2017-11-14 20:51:34 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-14 20:33:09 +01:00
|
|
|
@require_realm_admin
@has_request_variables
def update_default_stream_group_streams(
    request: HttpRequest,
    user_profile: UserProfile,
    group_id: int,
    op: str = REQ(),
    stream_names: List[str] = REQ(json_validator=check_list(check_string)),
) -> HttpResponse:
    """Add streams to, or remove streams from, a default stream group.

    *op* must be "add" or "remove".
    """
    stream_group = access_default_stream_group_by_id(user_profile.realm, group_id)
    # Access-check every stream before validating op, matching the
    # original order of error reporting.
    streams = [access_stream_by_name(user_profile, name)[0] for name in stream_names]

    if op == "add":
        do_add_streams_to_default_stream_group(user_profile.realm, stream_group, streams)
    elif op == "remove":
        do_remove_streams_from_default_stream_group(user_profile.realm, stream_group, streams)
    else:
        raise JsonableError(_('Invalid value for "op". Specify one of "add" or "remove".'))
    return json_success(request)
|
2017-11-01 18:20:34 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-01 18:20:34 +01:00
|
|
|
@require_realm_admin
@has_request_variables
def remove_default_stream_group(
    request: HttpRequest, user_profile: UserProfile, group_id: int
) -> HttpResponse:
    """Delete an entire default stream group."""
    stream_group = access_default_stream_group_by_id(user_profile.realm, group_id)
    do_remove_default_stream_group(user_profile.realm, stream_group)
    return json_success(request)
|
2017-11-01 18:20:34 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2015-11-23 14:35:16 +01:00
|
|
|
@require_realm_admin
@has_request_variables
def remove_default_stream(
    request: HttpRequest, user_profile: UserProfile, stream_id: int = REQ(json_validator=check_int)
) -> HttpResponse:
    """Remove a stream from the realm's set of default streams."""
    # allow_realm_admin presumably relaxes access checks for admins;
    # see access_stream_by_id for the exact semantics.
    stream = access_stream_by_id(user_profile, stream_id, allow_realm_admin=True)[0]
    do_remove_default_stream(stream)
    return json_success(request)
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2015-11-23 14:35:16 +01:00
|
|
|
@has_request_variables
def update_stream_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    stream_id: int,
    description: Optional[str] = REQ(
        str_validator=check_capped_string(Stream.MAX_DESCRIPTION_LENGTH), default=None
    ),
    is_private: Optional[bool] = REQ(json_validator=check_bool, default=None),
    is_announcement_only: Optional[bool] = REQ(json_validator=check_bool, default=None),
    stream_post_policy: Optional[int] = REQ(
        json_validator=check_int_in(Stream.STREAM_POST_POLICY_TYPES), default=None
    ),
    history_public_to_subscribers: Optional[bool] = REQ(json_validator=check_bool, default=None),
    is_web_public: Optional[bool] = REQ(json_validator=check_bool, default=None),
    new_name: Optional[str] = REQ(default=None),
    message_retention_days: Optional[Union[int, str]] = REQ(
        json_validator=check_string_or_int, default=None
    ),
) -> HttpResponse:
    """Update a stream's settings.

    Handles privacy (is_private / is_web_public /
    history_public_to_subscribers), message retention, description,
    name, and post policy.  Each parameter defaults to None, meaning
    "leave unchanged".  The three permission flags are first validated
    as a combined proposed state before any change is applied; the
    remaining settings are then applied independently via one do_*
    action each.
    """
    # We allow realm administrators to update the stream name and
    # description even for private streams.
    (stream, sub) = access_stream_for_delete_or_update(user_profile, stream_id)

    # Validate that the proposed state for permissions settings is permitted.
    # For each flag, fall back to the stream's current value when the
    # request did not supply one.
    if is_private is not None:
        proposed_is_private = is_private
    else:
        proposed_is_private = stream.invite_only

    if is_web_public is not None:
        proposed_is_web_public = is_web_public
    else:
        proposed_is_web_public = stream.is_web_public

    if stream.realm.is_zephyr_mirror_realm:
        # In the Zephyr mirroring model, history is unconditionally
        # not public to subscribers, even for public streams.
        proposed_history_public_to_subscribers = False
    elif history_public_to_subscribers is not None:
        proposed_history_public_to_subscribers = history_public_to_subscribers
    elif is_private is not None:
        # By default, private streams have protected history while for
        # public streams history is public by default.
        proposed_history_public_to_subscribers = not is_private
    else:
        proposed_history_public_to_subscribers = stream.history_public_to_subscribers

    # Web-public streams must have subscriber-public history.
    if proposed_is_web_public and not proposed_history_public_to_subscribers:
        raise JsonableError(_("Invalid parameters"))

    # Web-public streams must not be private.
    if proposed_is_web_public and proposed_is_private:
        raise JsonableError(_("Invalid parameters"))

    # Public streams must be public to subscribers.
    if not proposed_is_private and not proposed_history_public_to_subscribers:
        if stream.realm.is_zephyr_mirror_realm:
            # All Zephyr realm streams violate this rule.
            pass
        else:
            raise JsonableError(_("Invalid parameters"))

    if is_private is not None:
        # Default streams cannot be made private.
        default_stream_ids = {s.id for s in get_default_streams_for_realm(stream.realm_id)}
        if is_private and stream.id in default_stream_ids:
            raise JsonableError(_("Default streams cannot be made private."))

        # We require even realm administrators to be actually
        # subscribed to make a private stream public, via this
        # stricter access_stream check (no allow_realm_admin override).
        access_stream_by_id(user_profile, stream_id)

    # Enforce restrictions on creating web-public streams. Since these
    # checks are only required when changing a stream to be
    # web-public, we don't use an "is not None" check.
    if is_web_public:
        if not user_profile.realm.web_public_streams_enabled():
            raise JsonableError(_("Web-public streams are not enabled."))
        if not user_profile.can_create_web_public_streams():
            raise JsonableError(_("Insufficient permission"))

    # Apply all three permission flags together, in a single action,
    # only if at least one of them was explicitly supplied.
    if (
        is_private is not None
        or is_web_public is not None
        or history_public_to_subscribers is not None
    ):
        do_change_stream_permission(
            stream,
            invite_only=proposed_is_private,
            history_public_to_subscribers=proposed_history_public_to_subscribers,
            is_web_public=proposed_is_web_public,
            acting_user=user_profile,
        )

    if message_retention_days is not None:
        # Retention policy changes are restricted to realm owners on a
        # non-limited plan.
        if not user_profile.is_realm_owner:
            raise OrganizationOwnerRequired()
        user_profile.realm.ensure_not_on_limited_plan()
        new_message_retention_days_value = parse_message_retention_days(
            message_retention_days, Stream.MESSAGE_RETENTION_SPECIAL_VALUES_MAP
        )
        do_change_stream_message_retention_days(
            stream, user_profile, new_message_retention_days_value
        )

    if description is not None:
        if "\n" in description:
            # We don't allow newline characters in stream descriptions.
            description = description.replace("\n", " ")
        do_change_stream_description(stream, description, acting_user=user_profile)
    if new_name is not None:
        new_name = new_name.strip()
        if stream.name == new_name:
            raise JsonableError(_("Stream already has that name!"))
        if stream.name.lower() != new_name.lower():
            # Check that the stream name is available (unless we are
            # only changing the casing of the stream name).
            check_stream_name_available(user_profile.realm, new_name)
        do_rename_stream(stream, new_name, user_profile)
    if is_announcement_only is not None:
        # is_announcement_only is a legacy way to specify
        # stream_post_policy. We can probably just delete this code,
        # since we're not aware of clients that used it, but we're
        # keeping it for backwards-compatibility for now.  Note that it
        # deliberately overwrites any stream_post_policy parameter.
        stream_post_policy = Stream.STREAM_POST_POLICY_EVERYONE
        if is_announcement_only:
            stream_post_policy = Stream.STREAM_POST_POLICY_ADMINS
    if stream_post_policy is not None:
        do_change_stream_post_policy(stream, stream_post_policy, acting_user=user_profile)

    return json_success(request)
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-08-08 21:58:38 +02:00
|
|
|
@has_request_variables
def list_subscriptions_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    include_subscribers: bool = REQ(json_validator=check_bool, default=False),
) -> HttpResponse:
    """Return the streams the requesting user is subscribed to.

    gather_subscriptions returns a pair; only the first element
    (subscribed streams) is exposed by this endpoint.
    """
    # Fix: the second element used to be bound to `_`, shadowing the
    # module-level gettext alias `_` within this function.  Take only
    # the first element instead of binding a throwaway name.
    subscribed = gather_subscriptions(
        user_profile,
        include_subscribers=include_subscribers,
    )[0]
    return json_success(request, data={"subscriptions": subscribed})
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-06-25 01:01:10 +02:00
|
|
|
# Validator for the "add" parameter of update_subscriptions_backend /
# add_subscriptions_backend: a list of dicts, each with a required
# stream "name" and optional "color" and length-capped "description".
add_subscriptions_schema = check_list(
    check_dict_only(
        required_keys=[("name", check_string)],
        optional_keys=[
            ("color", check_color),
            ("description", check_capped_string(Stream.MAX_DESCRIPTION_LENGTH)),
        ],
    ),
)

# Validator for the "delete" parameter: a bare list of stream names.
remove_subscriptions_schema = check_list(check_string)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2015-11-23 14:35:16 +01:00
|
|
|
@has_request_variables
def update_subscriptions_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    delete: Sequence[str] = REQ(json_validator=remove_subscriptions_schema, default=[]),
    add: Sequence[Mapping[str, str]] = REQ(json_validator=add_subscriptions_schema, default=[]),
) -> HttpResponse:
    """Add and/or remove stream subscriptions in a single request.

    Delegates to add_subscriptions_backend and
    remove_subscriptions_backend via compose_views, which merges their
    JSON results and (per its docstring) rolls back if any composed
    call fails.  Adds are processed before removals.
    """
    if not add and not delete:
        raise JsonableError(_('Nothing to do. Specify at least one of "add" or "delete".'))

    # Deferred calls: both thunks are always present; each backend is
    # invoked with possibly-empty input.
    thunks = [
        lambda: add_subscriptions_backend(request, user_profile, streams_raw=add),
        lambda: remove_subscriptions_backend(request, user_profile, streams_raw=delete),
    ]
    data = compose_views(thunks)

    return json_success(request, data)
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-01-31 15:27:58 +01:00
|
|
|
|
|
|
|
def compose_views(thunks: List[Callable[[], HttpResponse]]) -> Dict[str, Any]:
    """Run each thunk in order inside one database transaction and merge
    their JSON payloads into a single dict.

    Batching results this way helps clients avoid extra latency hops,
    and the shared transaction ensures that a failure in any composed
    view rolls back the work done by the earlier ones.
    """
    merged: Dict[str, Any] = {}
    with transaction.atomic():
        for view_thunk in thunks:
            view_response = view_thunk()
            merged.update(orjson.loads(view_response.content))
    return merged
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-06-22 22:37:00 +02:00
|
|
|
check_principals: Validator[Union[List[str], List[int]]] = check_union(
|
|
|
|
[check_list(check_string), check_list(check_int)],
|
|
|
|
)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2015-11-23 14:35:16 +01:00
|
|
|
@has_request_variables
def remove_subscriptions_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    streams_raw: Sequence[str] = REQ("subscriptions", json_validator=remove_subscriptions_schema),
    principals: Optional[Union[List[str], List[int]]] = REQ(
        json_validator=check_principals, default=None
    ),
) -> HttpResponse:
    """Unsubscribe the requester (or, if `principals` is given, those
    users) from the named streams, reporting which stream names were
    actually removed vs. not subscribed in the first place."""
    realm = user_profile.realm

    streams_as_dict: List[StreamDict] = [
        {"name": stream_name.strip()} for stream_name in streams_raw
    ]

    unsubscribing_others = False
    if principals:
        people_to_unsub = {
            principal_to_user_profile(user_profile, principal) for principal in principals
        }
        # Removing a subscription for any user we don't directly control
        # requires the stricter "unsubscribing others" permission checks
        # inside list_to_streams.
        unsubscribing_others = any(
            not user_directly_controls_user(user_profile, target_user)
            for target_user in people_to_unsub
        )
    else:
        people_to_unsub = {user_profile}

    streams, __ = list_to_streams(
        streams_as_dict,
        user_profile,
        unsubscribing_others=unsubscribing_others,
    )

    (removed, not_subscribed) = bulk_remove_subscriptions(
        realm, people_to_unsub, streams, acting_user=user_profile
    )

    result: Dict[str, List[str]] = dict(
        removed=[removed_stream.name for _subscriber, removed_stream in removed],
        not_removed=[
            not_subscribed_stream.name
            for _subscriber, not_subscribed_stream in not_subscribed
        ],
    )

    return json_success(request, data=result)
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def you_were_just_subscribed_message(
    acting_user: UserProfile, recipient_user: UserProfile, stream_names: Set[str]
) -> str:
    """Build the localized notification text telling `recipient_user`
    that `acting_user` subscribed them to the given streams.

    The message is rendered in the recipient's default language; stream
    names are sorted for a deterministic ordering.
    """
    sorted_names = sorted(stream_names)
    acting_user_mention = f"@**{acting_user.full_name}|{acting_user.id}**"

    if len(sorted_names) == 1:
        with override_language(recipient_user.default_language):
            return _("{user_full_name} subscribed you to the stream {stream_name}.").format(
                user_full_name=acting_user_mention,
                stream_name=f"#**{sorted_names[0]}**",
            )

    with override_language(recipient_user.default_language):
        header = _("{user_full_name} subscribed you to the following streams:").format(
            user_full_name=acting_user_mention,
        )
    bullet_lines = "".join(f"* #**{name}**\n" for name in sorted_names)
    return header + "\n\n" + bullet_lines
|
2017-05-16 17:10:42 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-06-22 22:37:00 +02:00
|
|
|
# Sentinel meaning "inherit the realm-level message retention policy";
# translated to a concrete value by parse_message_retention_days.
RETENTION_DEFAULT: Union[str, int] = "realm_default"
# Default for the `principals` parameter below; an empty sequence means
# "subscribe only the acting user".
EMPTY_PRINCIPALS: Union[Sequence[str], Sequence[int]] = []
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-04 19:14:29 +02:00
|
|
|
@require_non_guest_user
@has_request_variables
def add_subscriptions_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    streams_raw: Sequence[Mapping[str, str]] = REQ(
        "subscriptions", json_validator=add_subscriptions_schema
    ),
    invite_only: bool = REQ(json_validator=check_bool, default=False),
    is_web_public: bool = REQ(json_validator=check_bool, default=False),
    stream_post_policy: int = REQ(
        json_validator=check_int_in(Stream.STREAM_POST_POLICY_TYPES),
        default=Stream.STREAM_POST_POLICY_EVERYONE,
    ),
    history_public_to_subscribers: Optional[bool] = REQ(json_validator=check_bool, default=None),
    message_retention_days: Union[str, int] = REQ(
        json_validator=check_string_or_int, default=RETENTION_DEFAULT
    ),
    announce: bool = REQ(json_validator=check_bool, default=False),
    principals: Union[Sequence[str], Sequence[int]] = REQ(
        json_validator=check_principals,
        default=EMPTY_PRINCIPALS,
    ),
    authorization_errors_fatal: bool = REQ(json_validator=check_bool, default=True),
) -> HttpResponse:
    """Subscribe the requester (or the given principals) to the named
    streams, creating streams that don't exist yet, and send the
    relevant notification messages.

    Returns per-email mappings of newly subscribed and already
    subscribed stream names; when authorization_errors_fatal is False,
    also lists streams the user could not access under "unauthorized".
    """
    realm = user_profile.realm
    stream_dicts = []
    color_map = {}
    for stream_dict in streams_raw:
        # 'color' field is optional
        # check for its presence in the streams_raw first
        if "color" in stream_dict:
            color_map[stream_dict["name"]] = stream_dict["color"]

        stream_dict_copy: StreamDict = {}
        stream_dict_copy["name"] = stream_dict["name"].strip()

        # We don't allow newline characters in stream descriptions.
        if "description" in stream_dict:
            stream_dict_copy["description"] = stream_dict["description"].replace("\n", " ")

        stream_dict_copy["invite_only"] = invite_only
        stream_dict_copy["is_web_public"] = is_web_public
        stream_dict_copy["stream_post_policy"] = stream_post_policy
        stream_dict_copy["history_public_to_subscribers"] = history_public_to_subscribers
        stream_dict_copy["message_retention_days"] = parse_message_retention_days(
            message_retention_days, Stream.MESSAGE_RETENTION_SPECIAL_VALUES_MAP
        )

        stream_dicts.append(stream_dict_copy)

    # Validation of the streams arguments, including enforcement of
    # can_create_streams policy and check_stream_name policy is inside
    # list_to_streams.
    existing_streams, created_streams = list_to_streams(stream_dicts, user_profile, autocreate=True)
    authorized_streams, unauthorized_streams = filter_stream_authorization(
        user_profile, existing_streams
    )
    if len(unauthorized_streams) > 0 and authorization_errors_fatal:
        raise JsonableError(
            _("Unable to access stream ({stream_name}).").format(
                stream_name=unauthorized_streams[0].name,
            )
        )
    # Newly created streams are also authorized for the creator
    streams = authorized_streams + created_streams

    if len(principals) > 0:
        if realm.is_zephyr_mirror_realm and not all(stream.invite_only for stream in streams):
            raise JsonableError(
                _("You can only invite other Zephyr mirroring users to private streams.")
            )
        if not user_profile.can_subscribe_other_users():
            # Guest users case will not be handled here as it will
            # be handled by the decorator above.
            raise JsonableError(_("Insufficient permission"))
        subscribers = {
            principal_to_user_profile(user_profile, principal) for principal in principals
        }
    else:
        subscribers = {user_profile}

    (subscribed, already_subscribed) = bulk_add_subscriptions(
        realm, streams, subscribers, acting_user=user_profile, color_map=color_map
    )

    # We can assume unique emails here for now, but we should eventually
    # convert this function to be more id-centric.
    email_to_user_profile: Dict[str, UserProfile] = {}

    result: Dict[str, Any] = dict(
        subscribed=defaultdict(list), already_subscribed=defaultdict(list)
    )
    for sub_info in subscribed:
        subscriber = sub_info.user
        stream = sub_info.stream
        result["subscribed"][subscriber.email].append(stream.name)
        email_to_user_profile[subscriber.email] = subscriber
    for sub_info in already_subscribed:
        subscriber = sub_info.user
        stream = sub_info.stream
        result["already_subscribed"][subscriber.email].append(stream.name)

    # Convert the defaultdicts to plain dicts once, both for the JSON
    # response and for send_messages_for_new_subscribers below.
    # (Previously this conversion was redundantly repeated after the
    # send_messages_for_new_subscribers call; converting an
    # already-plain dict is a no-op, so doing it once is sufficient.)
    result["subscribed"] = dict(result["subscribed"])
    result["already_subscribed"] = dict(result["already_subscribed"])

    send_messages_for_new_subscribers(
        user_profile=user_profile,
        subscribers=subscribers,
        new_subscriptions=result["subscribed"],
        email_to_user_profile=email_to_user_profile,
        created_streams=created_streams,
        announce=announce,
    )

    if not authorization_errors_fatal:
        result["unauthorized"] = [s.name for s in unauthorized_streams]
    return json_success(request, data=result)
|
2020-10-13 22:43:05 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-10-13 22:43:05 +02:00
|
|
|
def send_messages_for_new_subscribers(
    user_profile: UserProfile,
    subscribers: Set[UserProfile],
    new_subscriptions: Dict[str, List[str]],
    email_to_user_profile: Dict[str, UserProfile],
    created_streams: List[Stream],
    announce: bool,
) -> None:
    """
    If you are subscribing lots of new users to new streams,
    this function can be pretty expensive in terms of generating
    lots of queries and sending lots of messages. We isolate
    the code partly to make it easier to test things like
    excessive query counts by mocking this function so that it
    doesn't drown out query counts from other code.

    Three kinds of notifications are prepared and sent in one batch:
    (1) a private message to each user someone else subscribed,
    (2) an announcement in the realm's notifications stream when
        `announce` is set and streams were created, and
    (3) a "stream created" message in each newly created stream.
    """
    # Map email -> is_bot so we can skip bot recipients below.
    bots = {subscriber.email: subscriber.is_bot for subscriber in subscribers}

    newly_created_stream_names = {s.name for s in created_streams}

    realm = user_profile.realm
    mention_backend = MentionBackend(realm.id)

    # Inform the user if someone else subscribed them to stuff,
    # or if a new stream was created with the "announce" option.
    notifications = []
    if new_subscriptions:
        for email, subscribed_stream_names in new_subscriptions.items():
            if email == user_profile.email:
                # Don't send a Zulip if you invited yourself.
                continue
            if bots[email]:
                # Don't send invitation Zulips to bots
                continue

            # For each user, we notify them about newly subscribed streams, except for
            # streams that were newly created.
            notify_stream_names = set(subscribed_stream_names) - newly_created_stream_names

            if not notify_stream_names:
                continue

            recipient_user = email_to_user_profile[email]
            sender = get_system_bot(settings.NOTIFICATION_BOT, recipient_user.realm_id)

            # Rendered in the recipient's language inside this helper.
            msg = you_were_just_subscribed_message(
                acting_user=user_profile,
                recipient_user=recipient_user,
                stream_names=notify_stream_names,
            )

            notifications.append(
                internal_prep_private_message(
                    sender=sender,
                    recipient_user=recipient_user,
                    content=msg,
                    mention_backend=mention_backend,
                )
            )

    if announce and len(created_streams) > 0:
        notifications_stream = user_profile.realm.get_notifications_stream()
        if notifications_stream is not None:
            # Translate the announcement using the realm's default
            # language, not the acting user's.
            with override_language(notifications_stream.realm.default_language):
                if len(created_streams) > 1:
                    content = _("{user_name} created the following streams: {stream_str}.")
                else:
                    content = _("{user_name} created a new stream {stream_str}.")
                topic = _("new streams")

            content = content.format(
                user_name=silent_mention_syntax_for_user(user_profile),
                stream_str=", ".join(f"#**{s.name}**" for s in created_streams),
            )

            sender = get_system_bot(settings.NOTIFICATION_BOT, notifications_stream.realm_id)

            notifications.append(
                internal_prep_stream_message(
                    sender=sender,
                    stream=notifications_stream,
                    topic=topic,
                    content=content,
                ),
            )

    # Zephyr mirror realms don't get per-stream creation messages.
    if not user_profile.realm.is_zephyr_mirror_realm and len(created_streams) > 0:
        sender = get_system_bot(settings.NOTIFICATION_BOT, user_profile.realm_id)
        for stream in created_streams:
            with override_language(stream.realm.default_language):
                if stream.description == "":
                    stream_description = "*" + _("No description.") + "*"
                else:
                    stream_description = stream.description
            notifications.append(
                internal_prep_stream_message(
                    sender=sender,
                    stream=stream,
                    topic=str(Realm.STREAM_EVENTS_NOTIFICATION_TOPIC),
                    content=_(
                        "**{policy}** stream created by {user_name}. **Description:**"
                    ).format(
                        user_name=silent_mention_syntax_for_user(user_profile),
                        policy=get_stream_permission_policy_name(
                            invite_only=stream.invite_only,
                            history_public_to_subscribers=stream.history_public_to_subscribers,
                            is_web_public=stream.is_web_public,
                        ),
                    )
                    + f"\n```` quote\n{stream_description}\n````",
                ),
            )

    # Send all prepared notifications in one batch; the acting user's
    # own copies are marked read immediately.
    if len(notifications) > 0:
        do_send_messages(notifications, mark_as_read=[user_profile.id])
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2015-11-23 14:35:16 +01:00
|
|
|
@has_request_variables
def get_subscribers_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    stream_id: int = REQ("stream", converter=to_non_negative_int, path_only=True),
) -> HttpResponse:
    """Return the user IDs subscribed to the given stream.

    Realm administrators may fetch subscribers even for streams they
    cannot otherwise read (allow_realm_admin=True).
    """
    (target_stream, _sub) = access_stream_by_id(
        user_profile,
        stream_id,
        allow_realm_admin=True,
    )
    subscriber_ids = get_subscriber_ids(target_stream, user_profile)
    return json_success(request, data={"subscribers": list(subscriber_ids)})
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2015-11-23 14:35:16 +01:00
|
|
|
# By default, lists all streams that the user has access to --
# i.e. public streams plus invite-only streams that the user is on
@has_request_variables
def get_streams_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    include_public: bool = REQ(json_validator=check_bool, default=True),
    include_web_public: bool = REQ(json_validator=check_bool, default=False),
    include_subscribed: bool = REQ(json_validator=check_bool, default=True),
    include_all_active: bool = REQ(json_validator=check_bool, default=False),
    include_default: bool = REQ(json_validator=check_bool, default=False),
    include_owner_subscribed: bool = REQ(json_validator=check_bool, default=False),
) -> HttpResponse:
    """Thin wrapper around do_get_streams: forwards the include_* filter
    flags unchanged and returns the resulting stream list."""
    accessible_streams = do_get_streams(
        user_profile,
        include_public=include_public,
        include_web_public=include_web_public,
        include_subscribed=include_subscribed,
        include_all_active=include_all_active,
        include_default=include_default,
        include_owner_subscribed=include_owner_subscribed,
    )
    return json_success(request, data={"streams": accessible_streams})
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-05-18 13:54:35 +02:00
|
|
|
@has_request_variables
def get_stream_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    stream_id: int,
) -> HttpResponse:
    """Return the dictionary representation of a single stream,
    accessible to realm admins even without a subscription."""
    (target_stream, _sub) = access_stream_by_id(
        user_profile, stream_id, allow_realm_admin=True
    )
    stream_data = target_stream.to_dict()
    return json_success(request, data={"stream": stream_data})
|
|
|
|
|
|
|
|
|
2016-10-27 15:54:49 +02:00
|
|
|
@has_request_variables
def get_topics_backend(
    request: HttpRequest,
    maybe_user_profile: Union[UserProfile, AnonymousUser],
    stream_id: int = REQ(converter=to_non_negative_int, path_only=True),
) -> HttpResponse:
    """Return the topic history for a stream.

    Supports both authenticated users and anonymous requests against
    web-public streams; the two paths use different access checks and
    history helpers.
    """

    if not maybe_user_profile.is_authenticated:
        # Anonymous request: only web-public streams are reachable.
        is_web_public_query = True
        user_profile: Optional[UserProfile] = None
    else:
        is_web_public_query = False
        # Narrow the Union for the type checker.
        assert isinstance(maybe_user_profile, UserProfile)
        user_profile = maybe_user_profile
        assert user_profile is not None

    if is_web_public_query:
        realm = get_valid_realm_from_request(request)
        stream = access_web_public_stream(stream_id, realm)
        result = get_topic_history_for_public_stream(
            recipient_id=assert_is_not_none(stream.recipient_id)
        )

    else:
        assert user_profile is not None

        # Raises if the user cannot access the stream.
        (stream, sub) = access_stream_by_id(user_profile, stream_id)

        assert stream.recipient_id is not None
        result = get_topic_history_for_stream(
            user_profile=user_profile,
            recipient_id=stream.recipient_id,
            public_history=stream.is_history_public_to_subscribers(),
        )

    return json_success(request, data=dict(topics=result))
|
2016-10-27 15:54:49 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-01-18 17:40:54 +01:00
|
|
|
@require_realm_admin
@has_request_variables
def delete_in_topic(
    request: HttpRequest,
    user_profile: UserProfile,
    stream_id: int = REQ(converter=to_non_negative_int, path_only=True),
    topic_name: str = REQ("topic_name"),
) -> HttpResponse:
    """Delete all messages in a topic that the acting realm admin can
    access, working in committed batches so a timed-out request still
    makes progress."""
    stream, ignored_sub = access_stream_by_id(user_profile, stream_id)

    messages = messages_for_topic(assert_is_not_none(stream.recipient_id), topic_name)
    if not stream.is_history_public_to_subscribers():
        # Don't allow the user to delete messages that they don't have access to.
        deletable_message_ids = UserMessage.objects.filter(
            user_profile=user_profile, message_id__in=messages
        ).values_list("message_id", flat=True)
        messages = messages.filter(id__in=deletable_message_ids)

    # Topics can be large enough that this request will inevitably time out.
    # In such a case, it's good for some progress to be accomplished, so that
    # full deletion can be achieved by repeating the request. For that purpose,
    # we delete messages in atomic batches, committing after each batch.
    # TODO: Ideally this should be moved to the deferred_work queue.
    batch_size = RETENTION_STREAM_MESSAGE_BATCH_SIZE
    while True:
        # durable=True guarantees each batch commits before the next
        # begins; select_for_update locks the rows being deleted.
        with transaction.atomic(durable=True):
            messages_to_delete = messages.order_by("-id")[0:batch_size].select_for_update(
                of=("self",)
            )
            if not messages_to_delete:
                break
            do_delete_messages(user_profile.realm, messages_to_delete)

    return json_success(request)
|
2019-01-18 17:40:54 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-06-24 03:22:41 +02:00
|
|
|
@require_post
@authenticated_json_view
@has_request_variables
def json_stream_exists(
    request: HttpRequest,
    user_profile: UserProfile,
    stream_name: str = REQ("stream"),
    autosubscribe: bool = REQ(json_validator=check_bool, default=False),
) -> HttpResponse:
    """Check whether a stream exists and is accessible, reporting the
    requester's subscription status and optionally auto-subscribing."""
    check_stream_name(stream_name)

    try:
        (stream, subscription) = access_stream_by_name(user_profile, stream_name)
    except JsonableError as e:
        # Surface access failures as a 404 rather than a generic error.
        raise ResourceNotFoundError(e.msg)

    # access_stream functions return a subscription if and only if we
    # are already subscribed.
    is_subscribed = subscription is not None
    result = {"subscribed": is_subscribed}

    # If we got here, we're either subscribed or the stream is public.
    # So if we're not yet subscribed and autosubscribe is enabled, we
    # should join.
    if not is_subscribed and autosubscribe:
        bulk_add_subscriptions(
            user_profile.realm, [stream], [user_profile], acting_user=user_profile
        )
        result["subscribed"] = True

    return json_success(request, data=result)  # results are ignored for HEAD requests
|
2016-12-30 11:42:59 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-01-03 18:31:43 +01:00
|
|
|
@has_request_variables
def json_get_stream_id(
    request: HttpRequest, user_profile: UserProfile, stream_name: str = REQ("stream")
) -> HttpResponse:
    """Resolve a stream name to its ID, enforcing the caller's access rights."""
    stream, _sub = access_stream_by_name(user_profile, stream_name)
    return json_success(request, data={"stream_id": stream.id})
|
2017-01-03 18:31:43 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-05-09 22:29:59 +02:00
|
|
|
@has_request_variables
def update_subscriptions_property(
    request: HttpRequest,
    user_profile: UserProfile,
    stream_id: int = REQ(json_validator=check_int),
    property: str = REQ(),
    value: str = REQ(),
) -> HttpResponse:
    """Single-stream convenience wrapper around the bulk subscription
    property endpoint: wraps the arguments into a one-element
    subscription_data list and delegates."""
    return update_subscription_properties_backend(
        request,
        user_profile,
        subscription_data=[{"property": property, "stream_id": stream_id, "value": value}],
    )
|
|
|
|
|
2017-05-09 22:29:59 +02:00
|
|
|
|
2015-11-23 14:35:16 +01:00
|
|
|
@has_request_variables
def update_subscription_properties_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    subscription_data: List[Dict[str, Any]] = REQ(
        json_validator=check_list(
            check_dict(
                [
                    ("stream_id", check_int),
                    ("property", check_string),
                    ("value", check_union([check_string, check_bool])),
                ]
            ),
        ),
    ),
) -> HttpResponse:
    """
    This is the entry point to changing subscription properties. This
    is a bulk endpoint: requestors always provide a subscription_data
    list containing dictionaries for each stream of interest.

    Requests are of the form:

    [{"stream_id": "1", "property": "is_muted", "value": False},
     {"stream_id": "1", "property": "color", "value": "#c2c2c2"}]
    """
    # Every supported property except "color" is a boolean toggle.
    boolean_properties = [
        "in_home_view",
        "is_muted",
        "desktop_notifications",
        "audible_notifications",
        "push_notifications",
        "email_notifications",
        "pin_to_top",
        "wildcard_mentions_notify",
    ]
    property_converters = {
        "color": check_color,
        **{prop: check_bool for prop in boolean_properties},
    }

    for change in subscription_data:
        stream_id = change["stream_id"]
        prop = change["property"]
        new_value = change["value"]

        if prop not in property_converters:
            raise JsonableError(_("Unknown subscription property: {}").format(prop))

        (stream, sub) = access_stream_by_id(user_profile, stream_id)
        if sub is None:
            raise JsonableError(_("Not subscribed to stream id {}").format(stream_id))

        try:
            # Validate/coerce the value with the property-specific checker.
            new_value = property_converters[prop](prop, new_value)
        except ValidationError as error:
            raise JsonableError(error.message)

        do_change_subscription_property(
            user_profile, sub, stream, prop, new_value, acting_user=user_profile
        )

    # TODO: Do this more generally, see update_realm_user_settings_defaults.realm.py
    # Imported locally to avoid an import cycle at module load time —
    # TODO confirm that is still the reason it lives here.
    from zerver.lib.request import RequestNotes

    # Record any request parameters the validators above never consumed,
    # so clients can learn about unsupported/ignored parameters.
    request_notes = RequestNotes.get_notes(request)
    for req_var in request.POST:
        if req_var not in request_notes.processed_parameters:
            request_notes.ignored_parameters.add(req_var)

    result: Dict[str, Any] = {}
    if request_notes.ignored_parameters:
        result["ignored_parameters_unsupported"] = list(request_notes.ignored_parameters)

    return json_success(request, data=result)
|